diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 27e7b6444..0a3a86f0f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -114,8 +114,8 @@ jobs: # # We want the four parts to have similar runtimes. BENCH_INCLUDE_EXCLUDE_OPTS: [ - "--include cargo-0.60.0,cargo-0.87.1,stm32f4-0.14.0,webrender-2022", - "--exclude cargo-0.60.0,cargo-0.87.1,stm32f4-0.14.0,webrender-2022", + "--include cargo-0.87.1,stm32f4-0.14.0,webrender-2022", + "--exclude cargo-0.87.1,stm32f4-0.14.0,webrender-2022", ] PROFILES: [ "Check,Debug,Doc", diff --git a/collector/compile-benchmarks/README.md b/collector/compile-benchmarks/README.md index 19b531100..552694ea1 100644 --- a/collector/compile-benchmarks/README.md +++ b/collector/compile-benchmarks/README.md @@ -18,8 +18,6 @@ They mostly consist of real-world crates. - **bitmaps-3.2.1**: A bitmaps implementation. Stresses the compiler's trait handling by implementing a trait `Bits` for the type `BitsImpl` for every `N` value from 1 to 1024. -- **cargo-0.60.0**: The Rust package manager. A large program, and an important - part of the Rust ecosystem. - **cargo-0.87.1**: The Rust package manager. A large program, and an important part of the Rust ecosystem. - **clap-3.1.6**: A command line argument parser library. A crate used by many diff --git a/collector/compile-benchmarks/REUSE.toml b/collector/compile-benchmarks/REUSE.toml index dd6a62640..bc9879f55 100644 --- a/collector/compile-benchmarks/REUSE.toml +++ b/collector/compile-benchmarks/REUSE.toml @@ -22,11 +22,6 @@ path = "bitmaps-3.2.1/**" SPDX-License-Identifier = "MPL-2.0" SPDX-FileCopyrightText = "Bodil Stokke" -[[annotations]] -path = "cargo-0.60.0/**" -SPDX-FileCopyrightText = "The Rust Project Developers (see https://thanks.rust-lang.org)" -SPDX-License-Identifier = "MIT OR Apache-2.0" - [[annotations]] path = "cargo-0.87.1/**" SPDX-FileCopyrightText = "The Rust Project Developers (see https://thanks.rust-lang.org)" diff --git a/collector/compile-benchmarks/cargo-0.60.0/0-println.patch b/collector/compile-benchmarks/cargo-0.60.0/0-println.patch deleted file mode 100644 index 5ed366588..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/0-println.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs -index cc1c9874..4abbd75e 100644 ---- a/src/cargo/sources/path.rs -+++ b/src/cargo/sources/path.rs -@@ -54,6 +54,7 @@ impl<'cfg> PathSource<'cfg> { - } - - pub fn preload_with(&mut self, pkg: Package) { -+ println!("testing"); - assert!(!self.updated); - assert!(!self.recursive); - assert!(self.packages.is_empty()); diff --git a/collector/compile-benchmarks/cargo-0.60.0/CHANGELOG.md b/collector/compile-benchmarks/cargo-0.60.0/CHANGELOG.md deleted file mode 100644 index fb9112ec9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/CHANGELOG.md +++ /dev/null @@ -1,2411 +0,0 @@ -# Changelog - -## Cargo 1.59 (2022-02-24) -[7f08ace4...HEAD](https://github.com/rust-lang/cargo/compare/7f08ace4...HEAD) - -### Added - -### Changed - -### Fixed - -### Nightly only -- Added the `--crate-type` flag to `cargo rustc`. - [#10093](https://github.com/rust-lang/cargo/pull/10093) - - -## Cargo 1.58 (2022-01-13) -[b2e52d7c...rust-1.58.0](https://github.com/rust-lang/cargo/compare/b2e52d7c...rust-1.58.0) - -### Added - -- Added `rust_version` field to package data in `cargo metadata`. - [#9967](https://github.com/rust-lang/cargo/pull/9967) -- Added `--message-format` option to `cargo install`. 
- [#10107](https://github.com/rust-lang/cargo/pull/10107) - -### Changed - -- A warning is now shown when an alias shadows an external command. - [#10082](https://github.com/rust-lang/cargo/pull/10082) -- Updated curl to 7.80.0. - [#10040](https://github.com/rust-lang/cargo/pull/10040) - [#10106](https://github.com/rust-lang/cargo/pull/10106) - -### Fixed - -- Doctests now include rustc-link-args from build scripts. - [#9916](https://github.com/rust-lang/cargo/pull/9916) -- Fixed `cargo tree` entering an infinite loop with cyclical dev-dependencies. - Fixed an edge case where the resolver would fail to handle a cyclical dev-dependency with a feature. - [#10103](https://github.com/rust-lang/cargo/pull/10103) -- Fixed `cargo clean -p` when the directory path contains glob characters. - [#10072](https://github.com/rust-lang/cargo/pull/10072) -- Fixed debug builds of `cargo` which could panic when downloading a crate - when the server has a redirect with a non-empty body. - [#10048](https://github.com/rust-lang/cargo/pull/10048) - -### Nightly only - -- Make future-incompat-report output more user-friendly. - [#9953](https://github.com/rust-lang/cargo/pull/9953) -- Added support to scrape code examples from the `examples` directory to be included in the documentation. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#scrape-examples) - [#9525](https://github.com/rust-lang/cargo/pull/9525) - [#10037](https://github.com/rust-lang/cargo/pull/10037) - [#10017](https://github.com/rust-lang/cargo/pull/10017) -- Fixed `cargo report future-incompatibilities` to check stdout if it supports color. - [#10024](https://github.com/rust-lang/cargo/pull/10024) - -## Cargo 1.57 (2021-12-02) -[18751dd3...rust-1.57.0](https://github.com/rust-lang/cargo/compare/18751dd3...rust-1.57.0) - -### Added - -- ๐ŸŽ‰ Added custom named profiles. This also changes the `test` and `bench` - profiles to inherit their settings from `dev` and `release`, and Cargo will - now only use a single profile during a given command instead of using - different profiles for dependencies and cargo-targets. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#custom-profiles) - [#9943](https://github.com/rust-lang/cargo/pull/9943) -- The `rev` option for a git dependency now supports git references that start - with `refs/`. An example where this can be used is to depend on a pull - request from a service like GitHub before it is merged. - [#9859](https://github.com/rust-lang/cargo/pull/9859) -- Added `path_in_vcs` field to the `.cargo_vcs_info.json` file. - [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-package.html#cargo_vcs_infojson-format) - [#9866](https://github.com/rust-lang/cargo/pull/9866) - -### Changed - -- โ— `RUSTFLAGS` is no longer set for build scripts. This change was made in - 1.55, but the release notes did not highlight this change. Build scripts - should use `CARGO_ENCODED_RUSTFLAGS` instead. See the - [documentation](https://doc.rust-lang.org/nightly/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts) - for more details. -- The `cargo version` command now includes some extra information. - [#9968](https://github.com/rust-lang/cargo/pull/9968) -- Updated libgit2 to 1.3 which brings in a number of fixes and changes to git - handling. - [#9963](https://github.com/rust-lang/cargo/pull/9963) - [#9988](https://github.com/rust-lang/cargo/pull/9988) -- Shell completions now include shorthand b/r/c/d subcommands. 
- [#9951](https://github.com/rust-lang/cargo/pull/9951) -- `cargo update --precise` now allows specifying a version without semver - metadata (stuff after `+` in the version number). - [#9945](https://github.com/rust-lang/cargo/pull/9945) -- zsh completions now complete `--example` names. - [#9939](https://github.com/rust-lang/cargo/pull/9939) -- The progress bar now differentiates when building unittests. - [#9934](https://github.com/rust-lang/cargo/pull/9934) -- Some backwards-compatibility support for invalid TOML syntax has been removed. - [#9932](https://github.com/rust-lang/cargo/pull/9932) -- Reverted the change from 1.55 that triggered an error for dependency - specifications that did not include any fields. - [#9911](https://github.com/rust-lang/cargo/pull/9911) - -### Fixed - -- Removed a log message (from `CARGO_LOG`) that may leak tokens. - [#9873](https://github.com/rust-lang/cargo/pull/9873) -- `cargo fix` will now avoid writing fixes to the global registry cache. - [#9938](https://github.com/rust-lang/cargo/pull/9938) -- Fixed `-Z help` CLI option when used with a shorthand alias (b/c/r/d). - [#9933](https://github.com/rust-lang/cargo/pull/9933) - - -### Nightly only - - -## Cargo 1.56 (2021-10-21) -[cebef295...rust-1.56.0](https://github.com/rust-lang/cargo/compare/cebef295...rust-1.56.0) - -### Added - -- ๐ŸŽ‰ Cargo now supports the 2021 edition. - More information may be found in the [edition - guide](https://doc.rust-lang.org/nightly/edition-guide/rust-2021/index.html). - [#9800](https://github.com/rust-lang/cargo/pull/9800) -- ๐ŸŽ‰ Added the - [`rust-version`](https://doc.rust-lang.org/nightly/cargo/reference/manifest.html#the-rust-version-field) - field to `Cargo.toml` to specify the minimum supported Rust version, and the - `--ignore-rust-version` command line option to override it. - [#9732](https://github.com/rust-lang/cargo/pull/9732) -- Added the `[env]` table to config files to specify environment variables to - set. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#env) - [#9411](https://github.com/rust-lang/cargo/pull/9411) -- `[patch]` tables may now be specified in config files. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#patch) - [#9839](https://github.com/rust-lang/cargo/pull/9839) -- `cargo doc` now supports the `--example` and `--examples` flags. - [#9808](https://github.com/rust-lang/cargo/pull/9808) -- ๐ŸŽ‰ Build scripts can now pass additional linker arguments for binaries or all - linkable targets. [docs](https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html#outputs-of-the-build-script) - [#9557](https://github.com/rust-lang/cargo/pull/9557) -- Added support for the `-p` flag for `cargo publish` to publish a specific - package in a workspace. `cargo package` also now supports `-p` and - `--workspace`. - [#9559](https://github.com/rust-lang/cargo/pull/9559) -- Added documentation about third-party registries. - [#9830](https://github.com/rust-lang/cargo/pull/9830) -- Added the `{sha256-checksum}` placeholder for URLs in a registry `config.json`. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/registries.html#index-format) - [#9801](https://github.com/rust-lang/cargo/pull/9801) -- Added a warning when a dependency does not have a library. - [#9771](https://github.com/rust-lang/cargo/pull/9771) - -### Changed - -- Doc tests now support the `-q` flag to show terse test output. 
- [#9730](https://github.com/rust-lang/cargo/pull/9730) -- `features` used in a `[replace]` table now issues a warning, as they are ignored. - [#9681](https://github.com/rust-lang/cargo/pull/9681) -- Changed so that only `wasm32-unknown-emscripten` executables are built - without a hash in the filename. Previously it was all `wasm32` targets. - Additionally, all `apple` binaries are now built with a hash in the - filename. This allows multiple copies to be cached at once, and matches the - behavior on other platforms (except `msvc`). - [#9653](https://github.com/rust-lang/cargo/pull/9653) -- `cargo new` now generates an example that doesn't generate a warning with - clippy. - [#9796](https://github.com/rust-lang/cargo/pull/9796) -- `cargo fix --edition` now only applies edition-specific lints. - [#9846](https://github.com/rust-lang/cargo/pull/9846) -- Improve resolver message to include dependency requirements. - [#9827](https://github.com/rust-lang/cargo/pull/9827) -- `cargo fix` now has more debug logging available with the `CARGO_LOG` - environment variable. - [#9831](https://github.com/rust-lang/cargo/pull/9831) -- Changed `cargo fix --edition` to emit a warning when on the latest stable - edition when running on stable instead of generating an error. - [#9792](https://github.com/rust-lang/cargo/pull/9792) -- `cargo install` will now determine all of the packages to install before - starting the installation, which should help with reporting errors without - partially installing. - [#9793](https://github.com/rust-lang/cargo/pull/9793) -- The resolver report for `cargo fix --edition` now includes differences for - dev-dependencies. - [#9803](https://github.com/rust-lang/cargo/pull/9803) -- `cargo fix` will now show better diagnostics for abnormal errors from `rustc`. - [#9799](https://github.com/rust-lang/cargo/pull/9799) -- Entries in `cargo --list` are now deduplicated. - [#9773](https://github.com/rust-lang/cargo/pull/9773) -- Aliases are now included in `cargo --list`. - [#9764](https://github.com/rust-lang/cargo/pull/9764) - -### Fixed - -- Fixed panic with build-std of a proc-macro. - [#9834](https://github.com/rust-lang/cargo/pull/9834) -- Fixed running `cargo` recursively from proc-macros while running `cargo fix`. - [#9818](https://github.com/rust-lang/cargo/pull/9818) -- Return an error instead of a stack overflow for command alias loops. - [#9791](https://github.com/rust-lang/cargo/pull/9791) -- Updated to curl 7.79.1, which will hopefully fix intermittent http2 errors. - [#9937](https://github.com/rust-lang/cargo/pull/9937) - -### Nightly only - -- Added `[future-incompat-report]` config section. - [#9774](https://github.com/rust-lang/cargo/pull/9774) -- Fixed value-after-table error with custom named profiles. - [#9789](https://github.com/rust-lang/cargo/pull/9789) -- Added the `different-binary-name` feature to support specifying a - non-rust-identifier for a binary name. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#different-binary-name) - [#9627](https://github.com/rust-lang/cargo/pull/9627) -- Added a profile option to select the codegen backend. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#codegen-backend) - [#9118](https://github.com/rust-lang/cargo/pull/9118) - - -## Cargo 1.55 (2021-09-09) -[aa8b0929...rust-1.55.0](https://github.com/rust-lang/cargo/compare/aa8b0929...rust-1.55.0) - -### Added - -- The package definition in `cargo metadata` now includes the `"default_run"` - field from the manifest. 
- [#9550](https://github.com/rust-lang/cargo/pull/9550) -- โ— Build scripts now have access to the following environment variables: - `RUSTC_WRAPPER`, `RUSTC_WORKSPACE_WRAPPER`, `CARGO_ENCODED_RUSTFLAGS`. - `RUSTFLAGS` is no longer set for build scripts; they should use - `CARGO_ENCODED_RUSTFLAGS` instead. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts) - [#9601](https://github.com/rust-lang/cargo/pull/9601) -- Added `cargo d` as an alias for `cargo doc`. - [#9680](https://github.com/rust-lang/cargo/pull/9680) -- Added `{lib}` to the `cargo tree --format` option to display the library - name of a package. - [#9663](https://github.com/rust-lang/cargo/pull/9663) -- Added `members_mut` method to the `Workspace` API. - [#9547](https://github.com/rust-lang/cargo/pull/9547) - -### Changed - -- If a build command does not match any targets when using the - `--all-targets`, `--bins`, `--tests`, `--examples`, or `--benches` flags, a - warning is now displayed to inform you that there were no matching targets. - [#9549](https://github.com/rust-lang/cargo/pull/9549) -- The way `cargo init` detects whether or not existing source files represent - a binary or library has been changed to respect the command-line flags - instead of trying to guess which type it is. - [#9522](https://github.com/rust-lang/cargo/pull/9522) -- Registry names are now displayed instead of registry URLs when possible. - [#9632](https://github.com/rust-lang/cargo/pull/9632) -- Duplicate compiler diagnostics are no longer shown. This can often happen - with `cargo test` which builds multiple copies of the same code in parallel. - This also updates the warning summary to provide more context. - [#9675](https://github.com/rust-lang/cargo/pull/9675) -- The output for warnings or errors is now improved to be leaner, cleaner, and - show more context. - [#9655](https://github.com/rust-lang/cargo/pull/9655) -- Network send errors are now treated as "spurious" which means they will be retried. - [#9695](https://github.com/rust-lang/cargo/pull/9695) -- Git keys (`branch`, `tag`, `rev`) on a non-git dependency are now an error. - Additionally, specifying both `git` and `path` is now an error. - [#9689](https://github.com/rust-lang/cargo/pull/9689) -- Specifying a dependency without any keys is now an error. - [#9686](https://github.com/rust-lang/cargo/pull/9686) -- The resolver now prefers to use `[patch]` table entries of dependencies when - possible. - [#9639](https://github.com/rust-lang/cargo/pull/9639) -- Package name typo errors in dependencies are now displayed aligned with the - original to help make it easier to see the difference. - [#9665](https://github.com/rust-lang/cargo/pull/9665) -- Windows platforms may now warn on environment variables that have the wrong case. - [#9654](https://github.com/rust-lang/cargo/pull/9654) -- `features` used in a `[patch]` table now issues a warning, as they are ignored. - [#9666](https://github.com/rust-lang/cargo/pull/9666) -- The `target` directory is now excluded from content indexing on Windows. - [#9635](https://github.com/rust-lang/cargo/pull/9635) -- When `Cargo.toml` is not found, the error message now detects if it was - misnamed with a lowercase `c` to suggest the correct form. - [#9607](https://github.com/rust-lang/cargo/pull/9607) -- Building `diesel` with the new resolver displays a compatibility notice. 
- [#9602](https://github.com/rust-lang/cargo/pull/9602) -- Updated the `opener` dependency, which handles opening a web browser, which - includes several changes, such as new behavior when run on WSL, and using - the system `xdg-open` on Linux. - [#9583](https://github.com/rust-lang/cargo/pull/9583) -- Updated to libcurl 7.78. - [#9809](https://github.com/rust-lang/cargo/pull/9809) - [#9810](https://github.com/rust-lang/cargo/pull/9810) - -### Fixed - -- Fixed dep-info files including non-local build script paths. - [#9596](https://github.com/rust-lang/cargo/pull/9596) -- Handle "jobs = 0" case in cargo config files - [#9584](https://github.com/rust-lang/cargo/pull/9584) -- Implement warning for ignored trailing arguments after `--` - [#9561](https://github.com/rust-lang/cargo/pull/9561) -- Fixed rustc/rustdoc config values to be config-relative. - [#9566](https://github.com/rust-lang/cargo/pull/9566) -- `cargo fix` now supports rustc's suggestions with multiple spans. - [#9567](https://github.com/rust-lang/cargo/pull/9567) -- `cargo fix` now fixes each target serially instead of in parallel to avoid - problems with fixing the same file concurrently. - [#9677](https://github.com/rust-lang/cargo/pull/9677) -- Changes to the target `linker` config value now trigger a rebuild. - [#9647](https://github.com/rust-lang/cargo/pull/9647) -- Git unstaged deleted files are now ignored when using the `--allow-dirty` - flag with `cargo publish` or `cargo package`. - [#9645](https://github.com/rust-lang/cargo/pull/9645) - -### Nightly only - -- Enabled support for `cargo fix --edition` for 2021. - [#9588](https://github.com/rust-lang/cargo/pull/9588) -- Several changes to named profiles. - [#9685](https://github.com/rust-lang/cargo/pull/9685) -- Extended instructions on what to do when running `cargo fix --edition` on - the 2021 edition. - [#9694](https://github.com/rust-lang/cargo/pull/9694) -- Multiple updates to error messages using nightly features to help better - explain the situation. - [#9657](https://github.com/rust-lang/cargo/pull/9657) -- Adjusted the edition 2021 resolver diff report. - [#9649](https://github.com/rust-lang/cargo/pull/9649) -- Fixed error using `cargo doc --open` with `doc.extern-map`. - [#9531](https://github.com/rust-lang/cargo/pull/9531) -- Unified weak and namespaced features. - [#9574](https://github.com/rust-lang/cargo/pull/9574) -- Various updates to future-incompatible reporting. - [#9606](https://github.com/rust-lang/cargo/pull/9606) -- `[env]` environment variables are not allowed to set vars set by Cargo. - [#9579](https://github.com/rust-lang/cargo/pull/9579) - -## Cargo 1.54 (2021-07-29) -[4369396c...rust-1.54.0](https://github.com/rust-lang/cargo/compare/4369396c...rust-1.54.0) - -### Added - -- Fetching from a git repository (such as the crates.io index) now displays - the network transfer rate. - [#9395](https://github.com/rust-lang/cargo/pull/9395) -- Added `--prune` option for `cargo tree` to limit what is displayed. - [#9520](https://github.com/rust-lang/cargo/pull/9520) -- Added `--depth` option for `cargo tree` to limit what is displayed. - [#9499](https://github.com/rust-lang/cargo/pull/9499) -- Added `cargo tree -e no-proc-macro` to hide procedural macro dependencies. - [#9488](https://github.com/rust-lang/cargo/pull/9488) -- Added `doc.browser` config option to set which browser to open with `cargo doc --open`. 
- [#9473](https://github.com/rust-lang/cargo/pull/9473) -- Added `CARGO_TARGET_TMPDIR` environment variable set for integration tests & - benches. This provides a temporary or "scratch" directory in the `target` - directory for tests and benches to use. - [#9375](https://github.com/rust-lang/cargo/pull/9375) - -### Changed - -- `--features` CLI flags now provide typo suggestions with the new feature resolver. - [#9420](https://github.com/rust-lang/cargo/pull/9420) -- Cargo now uses a new parser for SemVer versions. This should behave mostly - the same as before with some minor exceptions where invalid syntax for - version requirements is now rejected. - [#9508](https://github.com/rust-lang/cargo/pull/9508) -- Mtime handling of `.crate` published packages has changed slightly to avoid - mtime values of 0. This was causing problems with lldb which refused to read - those files. - [#9517](https://github.com/rust-lang/cargo/pull/9517) -- Improved performance of git status check in `cargo package`. - [#9478](https://github.com/rust-lang/cargo/pull/9478) -- `cargo new` with fossil now places the ignore settings in the new repository - instead of using `fossil settings` to set them globally. This also includes - several other cleanups to make it more consistent with other VCS - configurations. - [#9469](https://github.com/rust-lang/cargo/pull/9469) -- `rustc-cdylib-link-arg` applying transitively displays a warning that this - was not intended, and may be an error in the future. - [#9563](https://github.com/rust-lang/cargo/pull/9563) - -### Fixed - -- Fixed `package.exclude` in `Cargo.toml` using inverted exclusions - (`!somefile`) when not in a git repository or when vendoring a dependency. - [#9186](https://github.com/rust-lang/cargo/pull/9186) -- Dep-info files now adjust build script `rerun-if-changed` paths to be - absolute paths. - [#9421](https://github.com/rust-lang/cargo/pull/9421) -- Fixed a bug when with resolver = "1" non-virtual package was allowing - unknown features. - [#9437](https://github.com/rust-lang/cargo/pull/9437) -- Fixed an issue with the index cache mishandling versions that only - differed in build metadata (such as `110.0.0` and `110.0.0+1.1.0f`). - [#9476](https://github.com/rust-lang/cargo/pull/9476) -- Fixed `cargo install` with a semver metadata version. - [#9467](https://github.com/rust-lang/cargo/pull/9467) - -### Nightly only - -- Added `report` subcommand, and changed `cargo - describe-future-incompatibilitie` to `cargo report - future-incompatibilities`. - [#9438](https://github.com/rust-lang/cargo/pull/9438) -- Added a `[host]` table to the config files to be able to set build flags for - host target. Also added `target-applies-to-host` to control how the - `[target]` tables behave. - [#9322](https://github.com/rust-lang/cargo/pull/9322) -- Added some validation to build script `rustc-link-arg-*` instructions to - return an error if the target doesn't exist. - [#9523](https://github.com/rust-lang/cargo/pull/9523) -- Added `cargo:rustc-link-arg-bin` instruction for build scripts. - [#9486](https://github.com/rust-lang/cargo/pull/9486) - - -## Cargo 1.53 (2021-06-17) -[90691f2b...rust-1.53.0](https://github.com/rust-lang/cargo/compare/90691f2b...rust-1.53.0) - -### Added - -### Changed -- ๐Ÿ”ฅ Cargo now supports git repositories where the default `HEAD` branch is not - "master". This also includes a switch to the version 3 `Cargo.lock` format - which can handle default branches correctly. 
- [#9133](https://github.com/rust-lang/cargo/pull/9133) - [#9397](https://github.com/rust-lang/cargo/pull/9397) - [#9384](https://github.com/rust-lang/cargo/pull/9384) - [#9392](https://github.com/rust-lang/cargo/pull/9392) -- ๐Ÿ”ฅ macOS targets now default to `unpacked` split-debuginfo. - [#9298](https://github.com/rust-lang/cargo/pull/9298) -- โ— The `authors` field is no longer included in `Cargo.toml` for new - projects. - [#9282](https://github.com/rust-lang/cargo/pull/9282) -- `cargo update` may now work with the `--offline` flag. - [#9279](https://github.com/rust-lang/cargo/pull/9279) -- `cargo doc` will now erase the `doc` directory when switching between - different toolchain versions. There are shared, unversioned files (such as - the search index) that can become broken when using different versions. - [#8640](https://github.com/rust-lang/cargo/pull/8640) - [#9404](https://github.com/rust-lang/cargo/pull/9404) -- Improved error messages when path dependency/workspace member is missing. - [#9368](https://github.com/rust-lang/cargo/pull/9368) - -### Fixed -- Fixed `cargo doc` detecting if the documentation needs to be rebuilt when - changing some settings such as features. - [#9419](https://github.com/rust-lang/cargo/pull/9419) -- `cargo doc` now deletes the output directory for the package before running - rustdoc to clear out any stale files. - [#9419](https://github.com/rust-lang/cargo/pull/9419) -- Fixed the `-C metadata` value to always include all information for all - builds. Previously, in some situations, the hash only included the package - name and version. This fixes some issues, such as incremental builds with - split-debuginfo on macOS corrupting the incremental cache in some cases. - [#9418](https://github.com/rust-lang/cargo/pull/9418) -- Fixed man pages not working on Windows if `man` is in `PATH`. - [#9378](https://github.com/rust-lang/cargo/pull/9378) -- The `rustc` cache is now aware of `RUSTC_WRAPPER` and `RUSTC_WORKSPACE_WRAPPER`. - [#9348](https://github.com/rust-lang/cargo/pull/9348) -- Track the `CARGO` environment variable in the rebuild fingerprint if the - code uses `env!("CARGO")`. - [#9363](https://github.com/rust-lang/cargo/pull/9363) - -### Nightly only -- Fixed config includes not working. - [#9299](https://github.com/rust-lang/cargo/pull/9299) -- Emit note when `--future-incompat-report` had nothing to report. - [#9263](https://github.com/rust-lang/cargo/pull/9263) -- Error messages for nightly features flags (like `-Z` and `cargo-features`) - now provides more information. - [#9290](https://github.com/rust-lang/cargo/pull/9290) -- Added the ability to set the target for an individual package in `Cargo.toml`. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#per-package-target) - [#9030](https://github.com/rust-lang/cargo/pull/9030) -- Fixed build-std updating the index on every build. - [#9393](https://github.com/rust-lang/cargo/pull/9393) -- `-Z help` now displays all the `-Z` options. - [#9369](https://github.com/rust-lang/cargo/pull/9369) -- Added `-Zallow-features` to specify which nightly features are allowed to be used. - [#9283](https://github.com/rust-lang/cargo/pull/9283) -- Added `cargo config` subcommand. - [#9302](https://github.com/rust-lang/cargo/pull/9302) - -## Cargo 1.52 (2021-05-06) -[34170fcd...rust-1.52.0](https://github.com/rust-lang/cargo/compare/34170fcd...rust-1.52.0) - -### Added -- Added the `"manifest_path"` field to JSON messages for a package. 
- [#9022](https://github.com/rust-lang/cargo/pull/9022) - [#9247](https://github.com/rust-lang/cargo/pull/9247) - -### Changed -- Build scripts are now forbidden from setting `RUSTC_BOOTSTRAP` on stable. - [#9181](https://github.com/rust-lang/cargo/pull/9181) - [#9385](https://github.com/rust-lang/cargo/pull/9385) -- crates.io now supports SPDX 3.11 licenses. - [#9209](https://github.com/rust-lang/cargo/pull/9209) -- An error is now reported if `CARGO_TARGET_DIR` is an empty string. - [#8939](https://github.com/rust-lang/cargo/pull/8939) -- Doc tests now pass the `--message-format` flag into the test so that the - "short" format can now be used for doc tests. - [#9128](https://github.com/rust-lang/cargo/pull/9128) -- `cargo test` now prints a clearer indicator of which target is currently running. - [#9195](https://github.com/rust-lang/cargo/pull/9195) -- The `CARGO_TARGET_` environment variable will now issue a warning if - it is using lowercase letters. - [#9169](https://github.com/rust-lang/cargo/pull/9169) - -### Fixed -- Fixed publication of packages with metadata and resolver fields in `Cargo.toml`. - [#9300](https://github.com/rust-lang/cargo/pull/9300) - [#9304](https://github.com/rust-lang/cargo/pull/9304) -- Fixed logic for determining prefer-dynamic for a dylib which differed in a - workspace vs a single package. - [#9252](https://github.com/rust-lang/cargo/pull/9252) -- Fixed an issue where exclusive target-specific dependencies that overlapped - across dependency kinds (like regular and build-dependencies) would - incorrectly include the dependencies in both. - [#9255](https://github.com/rust-lang/cargo/pull/9255) -- Fixed panic with certain styles of Package IDs when passed to the `-p` flag. - [#9188](https://github.com/rust-lang/cargo/pull/9188) -- When running cargo with output not going to a TTY, and with the progress bar - and color force-enabled, the output will now correctly clear the progress - line. - [#9231](https://github.com/rust-lang/cargo/pull/9231) -- Error instead of panic when JSON may contain non-utf8 paths. - [#9226](https://github.com/rust-lang/cargo/pull/9226) -- Fixed a hang that can happen on broken stderr. - [#9201](https://github.com/rust-lang/cargo/pull/9201) -- Fixed thin-local LTO not being disabled correctly when `lto=off` is set. - [#9182](https://github.com/rust-lang/cargo/pull/9182) - -### Nightly only -- The `strip` profile option now supports `true` and `false` values. - [#9153](https://github.com/rust-lang/cargo/pull/9153) -- `cargo fix --edition` now displays a report when switching to 2021 if the - new resolver changes features. - [#9268](https://github.com/rust-lang/cargo/pull/9268) -- Added `[patch]` table support in `.cargo/config` files. - [#9204](https://github.com/rust-lang/cargo/pull/9204) -- Added `cargo describe-future-incompatibilities` for generating a report on - dependencies that contain future-incompatible warnings. - [#8825](https://github.com/rust-lang/cargo/pull/8825) -- Added easier support for testing the 2021 edition. - [#9184](https://github.com/rust-lang/cargo/pull/9184) -- Switch the default resolver to "2" in the 2021 edition. - [#9184](https://github.com/rust-lang/cargo/pull/9184) -- `cargo fix --edition` now supports 2021. - [#9184](https://github.com/rust-lang/cargo/pull/9184) -- Added `--print` flag to `cargo rustc` to pass along to `rustc` to display - information from rustc. 
- [#9002](https://github.com/rust-lang/cargo/pull/9002) -- Added `-Zdoctest-in-workspace` for changing the directory where doctests are - *run* versus where they are *compiled*. - [#9105](https://github.com/rust-lang/cargo/pull/9105) -- Added support for an `[env]` section in `.cargo/config.toml` to set - environment variables when running cargo. - [#9175](https://github.com/rust-lang/cargo/pull/9175) -- Added a schema field and `features2` field to the index. - [#9161](https://github.com/rust-lang/cargo/pull/9161) -- Changes to JSON spec targets will now trigger a rebuild. - [#9223](https://github.com/rust-lang/cargo/pull/9223) - -## Cargo 1.51 (2021-03-25) -[75d5d8cf...rust-1.51.0](https://github.com/rust-lang/cargo/compare/75d5d8cf...rust-1.51.0) - -### Added -- ๐Ÿ”ฅ Added the `split-debuginfo` profile option. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#split-debuginfo) - [#9112](https://github.com/rust-lang/cargo/pull/9112) -- Added the `path` field to `cargo metadata` for the package dependencies list - to show the path for "path" dependencies. - [#8994](https://github.com/rust-lang/cargo/pull/8994) -- ๐Ÿ”ฅ Added a new feature resolver, and new CLI feature flag behavior. See the - new [features](https://doc.rust-lang.org/nightly/cargo/reference/features.html#feature-resolver-version-2) - and [resolver](https://doc.rust-lang.org/nightly/cargo/reference/resolver.html#feature-resolver-version-2) - documentation for the `resolver = "2"` option. See the - [CLI](https://doc.rust-lang.org/nightly/cargo/reference/features.html#command-line-feature-options) - and [resolver 2 CLI](https://doc.rust-lang.org/nightly/cargo/reference/features.html#resolver-version-2-command-line-flags) - options for the new CLI behavior. And, finally, see - [RFC 2957](https://github.com/rust-lang/rfcs/blob/master/text/2957-cargo-features2.md) - for a detailed look at what has changed. - [#8997](https://github.com/rust-lang/cargo/pull/8997) - -### Changed -- `cargo install --locked` now emits a warning if `Cargo.lock` is not found. - [#9108](https://github.com/rust-lang/cargo/pull/9108) -- Unknown or ambiguous package IDs passed on the command-line now display - suggestions for the correct package ID. - [#9095](https://github.com/rust-lang/cargo/pull/9095) -- Slightly optimize `cargo vendor` - [#8937](https://github.com/rust-lang/cargo/pull/8937) - [#9131](https://github.com/rust-lang/cargo/pull/9131) - [#9132](https://github.com/rust-lang/cargo/pull/9132) - -### Fixed -- Fixed environment variables and cfg settings emitted by a build script that - are set for `cargo test` and `cargo run` when the build script runs multiple - times during the same build session. - [#9122](https://github.com/rust-lang/cargo/pull/9122) -- Fixed a panic with `cargo doc` and the new feature resolver. This also - introduces some heuristics to try to avoid path collisions with `rustdoc` by - only documenting one variant of a package if there are multiple (such as - multiple versions, or the same package shared for host and target - platforms). - [#9077](https://github.com/rust-lang/cargo/pull/9077) -- Fixed a bug in Cargo's cyclic dep graph detection that caused a stack - overflow. - [#9075](https://github.com/rust-lang/cargo/pull/9075) -- Fixed build script `links` environment variables (`DEP_*`) not showing up - for testing packages in some cases. 
- [#9065](https://github.com/rust-lang/cargo/pull/9065) -- Fixed features being selected in a nondeterministic way for a specific - scenario when building an entire workspace with all targets with a - proc-macro in the workspace with `resolver="2"`. - [#9059](https://github.com/rust-lang/cargo/pull/9059) -- Fixed to use `http.proxy` setting in `~/.gitconfig`. - [#8986](https://github.com/rust-lang/cargo/pull/8986) -- Fixed --feature pkg/feat for V1 resolver for non-member. - [#9275](https://github.com/rust-lang/cargo/pull/9275) - [#9277](https://github.com/rust-lang/cargo/pull/9277) -- Fixed panic in `cargo doc` when there are colliding output filenames in a workspace. - [#9276](https://github.com/rust-lang/cargo/pull/9276) - [#9277](https://github.com/rust-lang/cargo/pull/9277) -- Fixed `cargo install` from exiting with success if one of several packages - did not install successfully. - [#9185](https://github.com/rust-lang/cargo/pull/9185) - [#9196](https://github.com/rust-lang/cargo/pull/9196) -- Fix panic with doc collision orphan. - [#9142](https://github.com/rust-lang/cargo/pull/9142) - [#9196](https://github.com/rust-lang/cargo/pull/9196) - -### Nightly only -- Removed the `publish-lockfile` unstable feature, it was stabilized without - the need for an explicit flag 1.5 years ago. - [#9092](https://github.com/rust-lang/cargo/pull/9092) -- Added better diagnostics, help messages, and documentation for nightly - features (such as those passed with the `-Z` flag, or specified with - `cargo-features` in `Cargo.toml`). - [#9092](https://github.com/rust-lang/cargo/pull/9092) -- Added support for Rust edition 2021. - [#8922](https://github.com/rust-lang/cargo/pull/8922) -- Added support for the `rust-version` field in project metadata. - [#8037](https://github.com/rust-lang/cargo/pull/8037) -- Added a schema field to the index. - [#9161](https://github.com/rust-lang/cargo/pull/9161) - [#9196](https://github.com/rust-lang/cargo/pull/9196) - -## Cargo 1.50 (2021-02-11) -[8662ab42...rust-1.50.0](https://github.com/rust-lang/cargo/compare/8662ab42...rust-1.50.0) - -### Added -- Added the `doc` field to `cargo metadata`, which indicates if a target is - documented. - [#8869](https://github.com/rust-lang/cargo/pull/8869) -- Added `RUSTC_WORKSPACE_WRAPPER`, an alternate RUSTC wrapper that only runs - for the local workspace packages, and caches its artifacts independently of - non-wrapped builds. - [#8976](https://github.com/rust-lang/cargo/pull/8976) -- Added `--workspace` to `cargo update` to update only the workspace members, - and not their dependencies. This is particularly useful if you update the - version in `Cargo.toml` and want to update `Cargo.lock` without running any - other commands. - [#8725](https://github.com/rust-lang/cargo/pull/8725) - -### Changed -- `.crate` files uploaded to a registry are now built with reproducible - settings, so that the same `.crate` file created on different machines - should be identical. - [#8864](https://github.com/rust-lang/cargo/pull/8864) -- Git dependencies that specify more than one of `branch`, `tag`, or `rev` are - now rejected. - [#8984](https://github.com/rust-lang/cargo/pull/8984) -- The `rerun-if-changed` build script directive can now point to a directory, - in which case Cargo will check if any file in that directory changes. - [#8973](https://github.com/rust-lang/cargo/pull/8973) -- If Cargo cannot determine the username or email address, `cargo new` will no - longer fail, and instead create an empty authors list. 
- [#8912](https://github.com/rust-lang/cargo/pull/8912) -- The progress bar width has been reduced to provide more room to display the - crates currently being built. - [#8892](https://github.com/rust-lang/cargo/pull/8892) -- `cargo new` will now support `includeIf` directives in `.gitconfig` to match - the correct directory when determining the username and email address. - [#8886](https://github.com/rust-lang/cargo/pull/8886) - -### Fixed -- Fixed `cargo metadata` and `cargo tree` to only download packages for the - requested target. - [#8987](https://github.com/rust-lang/cargo/pull/8987) -- Updated libgit2, which brings in many fixes, particularly fixing a zlib - error that occasionally appeared on 32-bit systems. - [#8998](https://github.com/rust-lang/cargo/pull/8998) -- Fixed stack overflow with a circular dev-dependency that uses the `links` - field. - [#8969](https://github.com/rust-lang/cargo/pull/8969) -- Fixed `cargo publish` failing on some filesystems, particularly 9p on WSL2. - [#8950](https://github.com/rust-lang/cargo/pull/8950) - -### Nightly only -- Allow `resolver="1"` to specify the original feature resolution behavior. - [#8857](https://github.com/rust-lang/cargo/pull/8857) -- Added `-Z extra-link-arg` which adds the `cargo:rustc-link-arg-bins` - and `cargo:rustc-link-arg` build script options. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#extra-link-arg) - [#8441](https://github.com/rust-lang/cargo/pull/8441) -- Implemented external credential process support, and added `cargo logout`. - ([RFC 2730](https://github.com/rust-lang/rfcs/blob/master/text/2730-cargo-token-from-process.md)) - ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#credential-process)) - [#8934](https://github.com/rust-lang/cargo/pull/8934) -- Fix panic with `-Zbuild-std` and no roots. - [#8942](https://github.com/rust-lang/cargo/pull/8942) -- Set docs.rs as the default extern-map for crates.io - [#8877](https://github.com/rust-lang/cargo/pull/8877) - -## Cargo 1.49 (2020-12-31) -[75615f8e...rust-1.49.0](https://github.com/rust-lang/cargo/compare/75615f8e...rust-1.49.0) - -### Added -- Added `homepage` and `documentation` fields to `cargo metadata`. - [#8744](https://github.com/rust-lang/cargo/pull/8744) -- Added the `CARGO_PRIMARY_PACKAGE` environment variable which is set when - running `rustc` if the package is one of the "root" packages selected on the - command line. - [#8758](https://github.com/rust-lang/cargo/pull/8758) -- Added support for Unix-style glob patterns for package and target selection - flags on the command-line (such as `-p 'serde*'` or `--test '*'`). - [#8752](https://github.com/rust-lang/cargo/pull/8752) - -### Changed -- Computed LTO flags are now included in the filename metadata hash so that - changes in LTO settings will independently cache build artifacts instead of - overwriting previous ones. This prevents rebuilds in some situations such as - switching between `cargo build` and `cargo test` in some circumstances. - [#8755](https://github.com/rust-lang/cargo/pull/8755) -- `cargo tree` now displays `(proc-macro)` next to proc-macro packages. - [#8765](https://github.com/rust-lang/cargo/pull/8765) -- Added a warning that the allowed characters for a feature name have been - restricted to letters, digits, `_`, `-`, and `+` to accommodate future - syntax changes. This is still a superset of the allowed syntax on crates.io, - which requires ASCII. This is intended to be changed to an error in the - future. 
- [#8814](https://github.com/rust-lang/cargo/pull/8814) -- `-p` without a value will now print a list of workspace package names. - [#8808](https://github.com/rust-lang/cargo/pull/8808) -- Add period to allowed feature name characters. - [#8932](https://github.com/rust-lang/cargo/pull/8932) - [#8943](https://github.com/rust-lang/cargo/pull/8943) - -### Fixed -- Fixed building a library with both "dylib" and "rlib" crate types with LTO enabled. - [#8754](https://github.com/rust-lang/cargo/pull/8754) -- Fixed paths in Cargo's dep-info files. - [#8819](https://github.com/rust-lang/cargo/pull/8819) -- Fixed inconsistent source IDs in `cargo metadata` for git dependencies that - explicitly specify `branch="master"`. - [#8824](https://github.com/rust-lang/cargo/pull/8824) -- Fixed re-extracting dependencies which contained a `.cargo-ok` file. - [#8835](https://github.com/rust-lang/cargo/pull/8835) - -### Nightly only -- Fixed a panic with `cargo doc -Zfeatures=itarget` in some situations. - [#8777](https://github.com/rust-lang/cargo/pull/8777) -- New implementation for namespaced features, using the syntax `dep:serde`. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#namespaced-features) - [#8799](https://github.com/rust-lang/cargo/pull/8799) -- Added support for "weak" dependency features, using the syntax - `dep_name?/feat_name`, which will enable a feature for a dependency without - also enabling the dependency. - [#8818](https://github.com/rust-lang/cargo/pull/8818) -- Fixed the new feature resolver downloading extra dependencies that weren't - strictly necessary. - [#8823](https://github.com/rust-lang/cargo/pull/8823) - -## Cargo 1.48 (2020-11-19) -[51b66125...rust-1.48.0](https://github.com/rust-lang/cargo/compare/51b66125...rust-1.48.0) - -### Added -- Added `term.progress` configuration option to control when and how the - progress bar is displayed. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#termprogresswhen) - [#8165](https://github.com/rust-lang/cargo/pull/8165) -- Added `--message-format plain` option to `cargo locate-project` to display - the project location without JSON to make it easier to use in a script. - [#8707](https://github.com/rust-lang/cargo/pull/8707) -- Added `--workspace` option to `cargo locate-project` to display the path to - the workspace manifest. - [#8712](https://github.com/rust-lang/cargo/pull/8712) -- A new contributor guide has been added for contributing to Cargo itself. - This is published at . - [#8715](https://github.com/rust-lang/cargo/pull/8715) -- Zsh `--target` completion will now complete with the built-in rustc targets. - [#8740](https://github.com/rust-lang/cargo/pull/8740) - -### Changed - -### Fixed -- Fixed `cargo new` creating a fossil repository to properly ignore the `target` directory. - [#8671](https://github.com/rust-lang/cargo/pull/8671) -- Don't show warnings about the workspace in the current directory when using `cargo install` - of a remote package. - [#8681](https://github.com/rust-lang/cargo/pull/8681) -- Automatically reinitialize the index when an "Object not found" error is - encountered in the git repository. - [#8735](https://github.com/rust-lang/cargo/pull/8735) -- Updated libgit2, which brings in several fixes for git repository handling. - [#8778](https://github.com/rust-lang/cargo/pull/8778) - [#8780](https://github.com/rust-lang/cargo/pull/8780) - -### Nightly only -- Fixed `cargo install` so that it will ignore the `[unstable]` table in local config files. 
- [#8656](https://github.com/rust-lang/cargo/pull/8656) -- Fixed nondeterministic behavior of the new feature resolver. - [#8701](https://github.com/rust-lang/cargo/pull/8701) -- Fixed running `cargo test` on a proc-macro with the new feature resolver - under a specific combination of circumstances. - [#8742](https://github.com/rust-lang/cargo/pull/8742) - -## Cargo 1.47 (2020-10-08) -[4f74d9b2...rust-1.47.0](https://github.com/rust-lang/cargo/compare/4f74d9b2...rust-1.47.0) - -### Added -- `cargo doc` will now include the package's version in the left sidebar. - [#8509](https://github.com/rust-lang/cargo/pull/8509) -- Added the `test` field to `cargo metadata` targets. - [#8478](https://github.com/rust-lang/cargo/pull/8478) -- Cargo's man pages are now displayed via the `cargo help` command (such as - `cargo help build`). - [#8456](https://github.com/rust-lang/cargo/pull/8456) - [#8577](https://github.com/rust-lang/cargo/pull/8577) -- Added new documentation chapters on [how dependency resolution - works](https://doc.rust-lang.org/nightly/cargo/reference/resolver.html) and - [SemVer - compatibility](https://doc.rust-lang.org/nightly/cargo/reference/semver.html), - along with suggestions on how to version your project and work with - dependencies. - [#8609](https://github.com/rust-lang/cargo/pull/8609) - -### Changed -- The comments added to `.gitignore` when it is modified have been tweaked to - add some spacing. - [#8476](https://github.com/rust-lang/cargo/pull/8476) -- `cargo metadata` output should now be sorted to be deterministic. - [#8489](https://github.com/rust-lang/cargo/pull/8489) -- By default, build scripts and proc-macros are now built with `opt-level=0` - and the default codegen units, even in release mode. - [#8500](https://github.com/rust-lang/cargo/pull/8500) -- `workspace.default-members` is now filtered by `workspace.exclude`. - [#8485](https://github.com/rust-lang/cargo/pull/8485) -- `workspace.members` globs now ignore non-directory paths. - [#8511](https://github.com/rust-lang/cargo/pull/8511) -- git zlib errors now trigger a retry. - [#8520](https://github.com/rust-lang/cargo/pull/8520) -- "http" class git errors now trigger a retry. - [#8553](https://github.com/rust-lang/cargo/pull/8553) -- git dependencies now override the `core.autocrlf` git configuration value to - ensure they behave consistently across platforms, particularly when - vendoring git dependencies on Windows. - [#8523](https://github.com/rust-lang/cargo/pull/8523) -- If `Cargo.lock` needs to be updated, then it will be automatically - transitioned to the new V2 format. This format removes the `[metadata]` - table, and should be easier to merge changes in source control systems. This - format was introduced in 1.38, and made the default for new projects in - 1.41. - [#8554](https://github.com/rust-lang/cargo/pull/8554) -- Added preparation for support of git repositories with a non-"master" - default branch. Actual support will arrive in a future version. This - introduces some warnings: - - Warn if a git dependency does not specify a branch, and the default branch - on the repository is not "master". In the future, Cargo will fetch the - default branch. In this scenario, the branch should be explicitly - specified. - - Warn if a workspace has multiple dependencies to the same git repository, - one without a `branch` and one with `branch="master"`. Dependencies should - all use one form or the other. 
- [#8522](https://github.com/rust-lang/cargo/pull/8522) -- Warnings are now issued if a `required-features` entry lists a feature that - does not exist. - [#7950](https://github.com/rust-lang/cargo/pull/7950) -- Built-in aliases are now included in `cargo --list`. - [#8542](https://github.com/rust-lang/cargo/pull/8542) -- `cargo install` with a specific version that has been yanked will now - display an error message that it has been yanked, instead of "could not - find". - [#8565](https://github.com/rust-lang/cargo/pull/8565) -- `cargo publish` with a package that has the `publish` field set to a single - registry, and no `--registry` flag has been given, will now publish to that - registry instead of generating an error. - [#8571](https://github.com/rust-lang/cargo/pull/8571) - -### Fixed -- Fixed issue where if a project directory was moved, and one of the - build scripts did not use the `rerun-if-changed` directive, then that - build script was being rebuilt when it shouldn't. - [#8497](https://github.com/rust-lang/cargo/pull/8497) -- Console colors should now work on Windows 7 and 8. - [#8540](https://github.com/rust-lang/cargo/pull/8540) -- The `CARGO_TARGET_{triplet}_RUNNER` environment variable will now correctly - override the config file instead of trying to merge the commands. - [#8629](https://github.com/rust-lang/cargo/pull/8629) -- Fixed LTO with doctests. - [#8657](https://github.com/rust-lang/cargo/pull/8657) - [#8658](https://github.com/rust-lang/cargo/pull/8658) - -### Nightly only -- Added support for `-Z terminal-width` which tells `rustc` the width of the - terminal so that it can format diagnostics better. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#terminal-width) - [#8427](https://github.com/rust-lang/cargo/pull/8427) -- Added ability to configure `-Z` unstable flags in config files via the - `[unstable]` table. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html) - [#8393](https://github.com/rust-lang/cargo/pull/8393) -- Added `-Z build-std-features` flag to set features for the standard library. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std-features) - [#8490](https://github.com/rust-lang/cargo/pull/8490) - -## Cargo 1.46 (2020-08-27) -[9fcb8c1d...rust-1.46.0](https://github.com/rust-lang/cargo/compare/9fcb8c1d...rust-1.46.0) - -### Added -- The `dl` key in `config.json` of a registry index now supports the - replacement markers `{prefix}` and `{lowerprefix}` to allow spreading crates - across directories similar to how the index itself is structured. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/registries.html#index-format) - [#8267](https://github.com/rust-lang/cargo/pull/8267) -- Added new environment variables that are set during compilation: - - `CARGO_CRATE_NAME`: The name of the crate being built. - - `CARGO_BIN_NAME`: The name of the executable binary (if this is a binary crate). - - `CARGO_PKG_LICENSE`: The `license` field from the manifest. - - `CARGO_PKG_LICENSE_FILE`: The `license-file` field from the manifest. - [#8270](https://github.com/rust-lang/cargo/pull/8270) - [#8325](https://github.com/rust-lang/cargo/pull/8325) - [#8387](https://github.com/rust-lang/cargo/pull/8387) -- If the value for `readme` is not specified in `Cargo.toml`, it is now - automatically inferred from the existence of a file named `README`, - `README.md`, or `README.txt`. This can be suppressed by setting - `readme = false`. 
- [#8277](https://github.com/rust-lang/cargo/pull/8277) -- `cargo install` now supports the `--index` flag to install directly from an index. - [#8344](https://github.com/rust-lang/cargo/pull/8344) -- Added the `metadata` table to the `workspace` definition in `Cargo.toml`. - This can be used for arbitrary data similar to the `package.metadata` table. - [#8323](https://github.com/rust-lang/cargo/pull/8323) -- Added the `--target-dir` flag to `cargo install` to set the target directory. - [#8391](https://github.com/rust-lang/cargo/pull/8391) -- Changes to environment variables used by the - [`env!`](https://doc.rust-lang.org/std/macro.env.html) or - [`option_env!`](https://doc.rust-lang.org/std/macro.option_env.html) macros - are now automatically detected to trigger a rebuild. - [#8421](https://github.com/rust-lang/cargo/pull/8421) -- The `target` directory now includes the `CACHEDIR.TAG` file which is used by - some tools to exclude the directory from backups. - [#8378](https://github.com/rust-lang/cargo/pull/8378) -- Added docs about rustup's `+toolchain` syntax. - [#8455](https://github.com/rust-lang/cargo/pull/8455) - -### Changed -- A warning is now displayed if a git dependency includes a `#` fragment in - the URL. This was potentially confusing because Cargo itself displays git - URLs with this syntax, but it does not have any meaning outside of the - `Cargo.lock` file, and would not work properly. - [#8297](https://github.com/rust-lang/cargo/pull/8297) -- Various optimizations and fixes for bitcode embedding and LTO. - [#8349](https://github.com/rust-lang/cargo/pull/8349) -- Reduced the amount of data fetched for git dependencies. If Cargo knows the - branch or tag to fetch, it will now only fetch that branch or tag instead of - all branches and tags. - [#8363](https://github.com/rust-lang/cargo/pull/8363) -- Enhanced git fetch error messages. - [#8409](https://github.com/rust-lang/cargo/pull/8409) -- `.crate` files are now generated with GNU tar format instead of UStar, which - supports longer file names. - [#8453](https://github.com/rust-lang/cargo/pull/8453) - -### Fixed -- Fixed a rare situation where an update to `Cargo.lock` failed once, but then - subsequent runs allowed it proceed. - [#8274](https://github.com/rust-lang/cargo/pull/8274) -- Removed assertion that Windows dylibs must have a `.dll` extension. Some - custom JSON spec targets may change the extension. - [#8310](https://github.com/rust-lang/cargo/pull/8310) -- Updated libgit2, which brings in a fix for zlib errors for some remote - git servers like googlesource.com. - [#8320](https://github.com/rust-lang/cargo/pull/8320) -- Fixed the GitHub fast-path check for up-to-date git dependencies on - non-master branches. - [#8363](https://github.com/rust-lang/cargo/pull/8363) -- Fixed issue when enabling a feature with `pkg/feature` syntax, and `pkg` is - an optional dependency, but also a dev-dependency, and the dev-dependency - appears before the optional normal dependency in the registry summary, then - the optional dependency would not get activated. - [#8395](https://github.com/rust-lang/cargo/pull/8395) -- Fixed `clean -p` deleting the build directory if there is a test named - `build`. - [#8398](https://github.com/rust-lang/cargo/pull/8398) -- Fixed indentation of multi-line Cargo error messages. - [#8409](https://github.com/rust-lang/cargo/pull/8409) -- Fixed issue where the automatic inclusion of the `--document-private-items` - flag for rustdoc would override any flags passed to the `cargo rustdoc` - command. 
- [#8449](https://github.com/rust-lang/cargo/pull/8449) -- Cargo will now include a version in the hash of the fingerprint directories - to support backwards-incompatible changes to the fingerprint structure. - [#8473](https://github.com/rust-lang/cargo/pull/8473) - [#8488](https://github.com/rust-lang/cargo/pull/8488) - -### Nightly only -- Added `-Zrustdoc-map` feature which provides external mappings for rustdoc - (such as https://docs.rs/ links). - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#rustdoc-map) - [#8287](https://github.com/rust-lang/cargo/pull/8287) -- Fixed feature calculation when a proc-macro is declared in `Cargo.toml` with - an underscore (like `proc_macro = true`). - [#8319](https://github.com/rust-lang/cargo/pull/8319) -- Added support for setting `-Clinker` with `-Zdoctest-xcompile`. - [#8359](https://github.com/rust-lang/cargo/pull/8359) -- Fixed setting the `strip` profile field in config files. - [#8454](https://github.com/rust-lang/cargo/pull/8454) - -## Cargo 1.45 (2020-07-16) -[ebda5065e...rust-1.45.0](https://github.com/rust-lang/cargo/compare/ebda5065...rust-1.45.0) - -### Added - -### Changed -- Changed official documentation to recommend `.cargo/config.toml` filenames - (with the `.toml` extension). `.toml` extension support was added in 1.39. - [#8121](https://github.com/rust-lang/cargo/pull/8121) -- The `registry.index` config value is no longer allowed (it has been - deprecated for 4 years). - [#7973](https://github.com/rust-lang/cargo/pull/7973) -- An error is generated if both `--index` and `--registry` are passed - (previously `--index` was silently ignored). - [#7973](https://github.com/rust-lang/cargo/pull/7973) -- The `registry.token` config value is no longer used with the `--index` flag. - This is intended to avoid potentially leaking the crates.io token to another - registry. - [#7973](https://github.com/rust-lang/cargo/pull/7973) -- Added a warning if `registry.token` is used with source replacement. It is - intended this will be an error in future versions. - [#7973](https://github.com/rust-lang/cargo/pull/7973) -- Windows GNU targets now copy `.dll.a` import library files for DLL crate - types to the output directory. - [#8141](https://github.com/rust-lang/cargo/pull/8141) -- Dylibs for all dependencies are now unconditionally copied to the output - directory. Some obscure scenarios can cause an old dylib to be referenced - between builds, and this ensures that all the latest copies are used. - [#8139](https://github.com/rust-lang/cargo/pull/8139) -- `package.exclude` can now match directory names. If a directory is - specified, the entire directory will be excluded, and Cargo will not attempt - to inspect it further. Previously Cargo would try to check every file in the - directory which could cause problems if the directory contained unreadable - files. - [#8095](https://github.com/rust-lang/cargo/pull/8095) -- When packaging with `cargo publish` or `cargo package`, Cargo can use git to - guide its decision on which files to include. Previously this git-based - logic required a `Cargo.toml` file to exist at the root of the repository. - This is no longer required, so Cargo will now use git-based guidance even if - there is not a `Cargo.toml` in the root of the repository. 
- [#8095](https://github.com/rust-lang/cargo/pull/8095) -- While unpacking a crate on Windows, if it fails to write a file because the - file is a reserved Windows filename (like "aux.rs"), Cargo will display an - extra message to explain why it failed. - [#8136](https://github.com/rust-lang/cargo/pull/8136) -- Failures to set mtime on files are now ignored. Some filesystems did not - support this. - [#8185](https://github.com/rust-lang/cargo/pull/8185) -- Certain classes of git errors will now recommend enabling - `net.git-fetch-with-cli`. - [#8166](https://github.com/rust-lang/cargo/pull/8166) -- When doing an LTO build, Cargo will now instruct rustc not to perform - codegen when possible. This may result in a faster build and use less disk - space. Additionally, for non-LTO builds, Cargo will instruct rustc to not - embed LLVM bitcode in libraries, which should decrease their size. - [#8192](https://github.com/rust-lang/cargo/pull/8192) - [#8226](https://github.com/rust-lang/cargo/pull/8226) - [#8254](https://github.com/rust-lang/cargo/pull/8254) -- The implementation for `cargo clean -p` has been rewritten so that it can - more accurately remove the files for a specific package. - [#8210](https://github.com/rust-lang/cargo/pull/8210) -- The way Cargo computes the outputs from a build has been rewritten to be - more complete and accurate. Newly tracked files will be displayed in JSON - messages, and may be uplifted to the output directory in some cases. Some of - the changes from this are: - - - `.exp` export files on Windows MSVC dynamic libraries are now tracked. - - Proc-macros on Windows track import/export files. - - All targets (like tests, etc.) that generate separate debug files - (pdb/dSYM) are tracked. - - Added .map files for wasm32-unknown-emscripten. - - macOS dSYM directories are tracked for all dynamic libraries - (dylib/cdylib/proc-macro) and for build scripts. - - There are a variety of other changes as a consequence of this: - - - Binary examples on Windows MSVC with a hyphen will now show up twice in - the examples directory (`foo_bar.exe` and `foo-bar.exe`). Previously Cargo - just renamed the file instead of hard-linking it. - - Example libraries now follow the same rules for hyphen/underscore - translation as normal libs (they will now use underscores). - - [#8210](https://github.com/rust-lang/cargo/pull/8210) -- Cargo attempts to scrub any secrets from the debug log for HTTP debugging. - [#8222](https://github.com/rust-lang/cargo/pull/8222) -- Context has been added to many of Cargo's filesystem operations, so that - error messages now provide more information, such as the path that caused - the problem. - [#8232](https://github.com/rust-lang/cargo/pull/8232) -- Several commands now ignore the error if stdout or stderr is closed while it - is running. For example `cargo install --list | grep -q cargo-fuzz` would - previously sometimes panic because `grep -q` may close stdout before the - command finishes. Regular builds continue to fail if stdout or stderr is - closed, matching the behavior of many other build systems. - [#8236](https://github.com/rust-lang/cargo/pull/8236) -- If `cargo install` is given an exact version, like `--version=1.2.3`, it - will now avoid updating the index if that version is already installed, and - exit quickly indicating it is already installed. - [#8022](https://github.com/rust-lang/cargo/pull/8022) -- Changes to the `[patch]` section will now attempt to automatically update - `Cargo.lock` to the new version. 
It should now also provide better error - messages for the rare cases where it is unable to automatically update. - [#8248](https://github.com/rust-lang/cargo/pull/8248) - -### Fixed -- Fixed copying Windows `.pdb` files to the output directory when the filename - contained dashes. - [#8123](https://github.com/rust-lang/cargo/pull/8123) -- Fixed error where Cargo would fail when scanning if a package is inside a - git repository when any of its ancestor paths is a symlink. - [#8186](https://github.com/rust-lang/cargo/pull/8186) -- Fixed `cargo update` with an unused `[patch]` so that it does not get - stuck and refuse to update. - [#8243](https://github.com/rust-lang/cargo/pull/8243) -- Fixed a situation where Cargo would hang if stderr is closed, and the - compiler generated a large number of messages. - [#8247](https://github.com/rust-lang/cargo/pull/8247) -- Fixed backtraces on macOS not showing filenames or line numbers. As a - consequence of this, binary executables on apple targets do not include a - hash in the filename in Cargo's cache. This means Cargo can only track one - copy, so if you switch features or rustc versions, Cargo will need to - rebuild the executable. - [#8329](https://github.com/rust-lang/cargo/pull/8329) - [#8335](https://github.com/rust-lang/cargo/pull/8335) -- Fixed fingerprinting when using lld on Windows with a dylib. Cargo was - erroneously thinking the dylib was never fresh. - [#8290](https://github.com/rust-lang/cargo/pull/8290) - [#8335](https://github.com/rust-lang/cargo/pull/8335) - -### Nightly only -- Fixed passing the full path for `--target` to `rustdoc` when using JSON spec - targets. - [#8094](https://github.com/rust-lang/cargo/pull/8094) -- `-Cembed-bitcode=no` renamed to `-Cbitcode-in-rlib=no` - [#8134](https://github.com/rust-lang/cargo/pull/8134) -- Added new `resolver` field to `Cargo.toml` to opt-in to the new feature - resolver. - [#8129](https://github.com/rust-lang/cargo/pull/8129) -- `-Zbuild-std` no longer treats std dependencies as "local". This means that - it won't use incremental compilation for those dependencies, removes them - from dep-info files, and caps lints at "allow". - [#8177](https://github.com/rust-lang/cargo/pull/8177) -- Added `-Zmultitarget` which allows multiple `--target` flags to build the - same thing for multiple targets at once. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#multitarget) - [#8167](https://github.com/rust-lang/cargo/pull/8167) -- Added `strip` option to the profile to remove symbols and debug information. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-strip-option) - [#8246](https://github.com/rust-lang/cargo/pull/8246) -- Fixed panic with `cargo tree --target=all -Zfeatures=all`. - [#8269](https://github.com/rust-lang/cargo/pull/8269) - -## Cargo 1.44 (2020-06-04) -[bda50510...rust-1.44.0](https://github.com/rust-lang/cargo/compare/bda50510...rust-1.44.0) - -### Added -- ๐Ÿ”ฅ Added the `cargo tree` command. - [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-tree.html) - [#8062](https://github.com/rust-lang/cargo/pull/8062) -- Added warnings if a package has Windows-restricted filenames (like `nul`, - `con`, `aux`, `prn`, etc.). - [#7959](https://github.com/rust-lang/cargo/pull/7959) -- Added a `"build-finished"` JSON message when compilation is complete so that - tools can detect when they can stop listening for JSON messages with - commands like `cargo run` or `cargo test`. 
- [#8069](https://github.com/rust-lang/cargo/pull/8069) - -### Changed -- Valid package names are now restricted to Unicode XID identifiers. This is - mostly the same as before, except package names cannot start with a number - or `-`. - [#7959](https://github.com/rust-lang/cargo/pull/7959) -- `cargo new` and `init` will warn or reject additional package names - (reserved Windows names, reserved Cargo directories, non-ASCII names, - conflicting std names like `core`, etc.). - [#7959](https://github.com/rust-lang/cargo/pull/7959) -- Tests are no longer hard-linked into the output directory (`target/debug/`). - This ensures tools will have access to debug symbols and execute tests in - the same way as Cargo. Tools should use JSON messages to discover the path - to the executable. - [#7965](https://github.com/rust-lang/cargo/pull/7965) -- Updating git submodules now displays an "Updating" message for each - submodule. - [#7989](https://github.com/rust-lang/cargo/pull/7989) -- File modification times are now preserved when extracting a `.crate` file. - This reverses the change made in 1.40 where the mtime was not preserved. - [#7935](https://github.com/rust-lang/cargo/pull/7935) -- Build script warnings are now displayed separately when the build script - fails. - [#8017](https://github.com/rust-lang/cargo/pull/8017) -- Removed the `git-checkout` subcommand. - [#8040](https://github.com/rust-lang/cargo/pull/8040) -- The progress bar is now enabled for all unix platforms. Previously it was - only Linux, macOS, and FreeBSD. - [#8054](https://github.com/rust-lang/cargo/pull/8054) -- Artifacts generated by pre-release versions of `rustc` now share the same - filenames. This means that changing nightly versions will not leave stale - files in the build directory. - [#8073](https://github.com/rust-lang/cargo/pull/8073) -- Invalid package names are rejected when using renamed dependencies. - [#8090](https://github.com/rust-lang/cargo/pull/8090) -- Added a certain class of HTTP2 errors as "spurious" that will get retried. - [#8102](https://github.com/rust-lang/cargo/pull/8102) -- Allow `cargo package --list` to succeed, even if there are other validation - errors (such as `Cargo.lock` generation problem, or missing dependencies). - [#8175](https://github.com/rust-lang/cargo/pull/8175) - [#8215](https://github.com/rust-lang/cargo/pull/8215) - -### Fixed -- Cargo no longer buffers excessive amounts of compiler output in memory. - [#7838](https://github.com/rust-lang/cargo/pull/7838) -- Symbolic links in git repositories now work on Windows. - [#7996](https://github.com/rust-lang/cargo/pull/7996) -- Fixed an issue where `profile.dev` was not loaded from a config file with - `cargo test` when the `dev` profile was not defined in `Cargo.toml`. - [#8012](https://github.com/rust-lang/cargo/pull/8012) -- When a binary is built as an implicit dependency of an integration test, - it now checks `dep_name/feature_name` syntax in `required-features` correctly. - [#8020](https://github.com/rust-lang/cargo/pull/8020) -- Fixed an issue where Cargo would not detect that an executable (such as an - integration test) needs to be rebuilt when the previous build was - interrupted with Ctrl-C. - [#8087](https://github.com/rust-lang/cargo/pull/8087) -- Protect against some (unknown) situations where Cargo could panic when the - system monotonic clock doesn't appear to be monotonic. - [#8114](https://github.com/rust-lang/cargo/pull/8114) -- Fixed panic with `cargo clean -p` if the package has a build script. 
- [#8216](https://github.com/rust-lang/cargo/pull/8216) - -### Nightly only -- Fixed panic with new feature resolver and required-features. - [#7962](https://github.com/rust-lang/cargo/pull/7962) -- Added `RUSTC_WORKSPACE_WRAPPER` environment variable, which provides a way - to wrap `rustc` for workspace members only, and affects the filename hash so - that artifacts produced by the wrapper are cached separately. This usage can - be seen on nightly clippy with `cargo clippy -Zunstable-options`. - [#7533](https://github.com/rust-lang/cargo/pull/7533) -- Added `--unit-graph` CLI option to display Cargo's internal dependency graph - as JSON. - [#7977](https://github.com/rust-lang/cargo/pull/7977) -- Changed `-Zbuild_dep` to `-Zhost_dep`, and added proc-macros to the feature - decoupling logic. - [#8003](https://github.com/rust-lang/cargo/pull/8003) - [#8028](https://github.com/rust-lang/cargo/pull/8028) -- Fixed so that `--crate-version` is not automatically passed when the flag - is found in `RUSTDOCFLAGS`. - [#8014](https://github.com/rust-lang/cargo/pull/8014) -- Fixed panic with `-Zfeatures=dev_dep` and `check --profile=test`. - [#8027](https://github.com/rust-lang/cargo/pull/8027) -- Fixed panic with `-Zfeatures=itarget` with certain host dependencies. - [#8048](https://github.com/rust-lang/cargo/pull/8048) -- Added support for `-Cembed-bitcode=no`, which provides a performance boost - and disk-space usage reduction for non-LTO builds. - [#8066](https://github.com/rust-lang/cargo/pull/8066) -- `-Zpackage-features` has been extended with several changes intended to make - it easier to select features on the command-line in a workspace. - [#8074](https://github.com/rust-lang/cargo/pull/8074) - -## Cargo 1.43 (2020-04-23) -[9d32b7b0...rust-1.43.0](https://github.com/rust-lang/cargo/compare/9d32b7b0...rust-1.43.0) - -### Added -- ๐Ÿ”ฅ Profiles may now be specified in config files (and environment variables). - [docs](https://doc.rust-lang.org/nightly/cargo/reference/config.html#profile) - [#7823](https://github.com/rust-lang/cargo/pull/7823) -- โ— Added `CARGO_BIN_EXE_` environment variable when building - integration tests. This variable contains the path to any `[[bin]]` targets - in the package. Integration tests should use the `env!` macro to determine - the path to a binary to execute. - [docs](https://doc.rust-lang.org/nightly/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates) - [#7697](https://github.com/rust-lang/cargo/pull/7697) - -### Changed -- `cargo install --git` now honors workspaces in a git repository. This allows - workspace settings, like `[patch]`, `[replace]`, or `[profile]` to be used. - [#7768](https://github.com/rust-lang/cargo/pull/7768) -- `cargo new` will now run `rustfmt` on the new files to pick up rustfmt - settings like `tab_spaces` so that the new file matches the user's preferred - indentation settings. - [#7827](https://github.com/rust-lang/cargo/pull/7827) -- Environment variables printed with "very verbose" output (`-vv`) are now - consistently sorted. - [#7877](https://github.com/rust-lang/cargo/pull/7877) -- Debug logging for fingerprint rebuild-detection now includes more information. - [#7888](https://github.com/rust-lang/cargo/pull/7888) - [#7890](https://github.com/rust-lang/cargo/pull/7890) - [#7952](https://github.com/rust-lang/cargo/pull/7952) -- Added warning during publish if the license-file doesn't exist. 
- [#7905](https://github.com/rust-lang/cargo/pull/7905) -- The `license-file` file is automatically included during publish, even if it - is not explicitly listed in the `include` list or is in a location outside - of the root of the package. - [#7905](https://github.com/rust-lang/cargo/pull/7905) -- `CARGO_CFG_DEBUG_ASSERTIONS` and `CARGO_CFG_PROC_MACRO` are no longer set - when running a build script. These were inadvertently set in the past, but - had no meaning as they were always true. Additionally, `cfg(proc-macro)` - is no longer supported in a `target` expression. - [#7943](https://github.com/rust-lang/cargo/pull/7943) - [#7970](https://github.com/rust-lang/cargo/pull/7970) - -### Fixed -- Global command-line flags now work with aliases (like `cargo -v b`). - [#7837](https://github.com/rust-lang/cargo/pull/7837) -- Required-features using dependency syntax (like `renamed_dep/feat_name`) now - handle renamed dependencies correctly. - [#7855](https://github.com/rust-lang/cargo/pull/7855) -- Fixed a rare situation where if a build script is run multiple times during - the same build, Cargo will now keep the results separate instead of losing - the output of the first execution. - [#7857](https://github.com/rust-lang/cargo/pull/7857) -- Fixed incorrect interpretation of environment variable - `CARGO_TARGET_*_RUNNER=true` as a boolean. Also improved related env var - error messages. - [#7891](https://github.com/rust-lang/cargo/pull/7891) -- Updated internal libgit2 library, bringing various fixes to git support. - [#7939](https://github.com/rust-lang/cargo/pull/7939) -- `cargo package` / `cargo publish` should no longer buffer the entire - contents of each file in memory. - [#7946](https://github.com/rust-lang/cargo/pull/7946) -- Ignore more invalid `Cargo.toml` files in a git dependency. Cargo currently - walks the entire repo to find the requested package. Certain invalid - manifests were already skipped, and now it should skip all of them. - [#7947](https://github.com/rust-lang/cargo/pull/7947) - -### Nightly only -- Added `build.out-dir` config variable to set the output directory. - [#7810](https://github.com/rust-lang/cargo/pull/7810) -- Added `-Zjobserver-per-rustc` feature to support improved performance for - parallel rustc. - [#7731](https://github.com/rust-lang/cargo/pull/7731) -- Fixed filename collision with `build-std` and crates like `cc`. - [#7860](https://github.com/rust-lang/cargo/pull/7860) -- `-Ztimings` will now save its report even if there is an error. - [#7872](https://github.com/rust-lang/cargo/pull/7872) -- Updated `--config` command-line flag to support taking a path to a config - file to load. - [#7901](https://github.com/rust-lang/cargo/pull/7901) -- Added new feature resolver. - [#7820](https://github.com/rust-lang/cargo/pull/7820) -- Rustdoc docs now automatically include the version of the package in the - side bar (requires `-Z crate-versions` flag). - [#7903](https://github.com/rust-lang/cargo/pull/7903) - -## Cargo 1.42 (2020-03-12) -[0bf7aafe...rust-1.42.0](https://github.com/rust-lang/cargo/compare/0bf7aafe...rust-1.42.0) - -### Added -- Added documentation on git authentication. - [#7658](https://github.com/rust-lang/cargo/pull/7658) -- Bitbucket Pipeline badges are now supported on crates.io. - [#7663](https://github.com/rust-lang/cargo/pull/7663) -- `cargo vendor` now accepts the `--versioned-dirs` option to force it to - always include the version number in each package's directory name. 
- [#7631](https://github.com/rust-lang/cargo/pull/7631) -- The `proc_macro` crate is now automatically added to the extern prelude for - proc-macro packages. This means that `extern crate proc_macro;` is no longer - necessary for proc-macros. - [#7700](https://github.com/rust-lang/cargo/pull/7700) - -### Changed -- Emit a warning if `debug_assertions`, `test`, `proc_macro`, or `feature=` is - used in a `cfg()` expression. - [#7660](https://github.com/rust-lang/cargo/pull/7660) -- Large update to the Cargo documentation, adding new chapters on Cargo - targets, workspaces, and features. - [#7733](https://github.com/rust-lang/cargo/pull/7733) -- Windows: `.lib` DLL import libraries are now copied next to the dll for all - Windows MSVC targets. Previously it was only supported for - `pc-windows-msvc`. This adds DLL support for `uwp-windows-msvc` targets. - [#7758](https://github.com/rust-lang/cargo/pull/7758) -- The `ar` field in the `[target]` configuration is no longer read. It has - been ignored for over 4 years. - [#7763](https://github.com/rust-lang/cargo/pull/7763) -- Bash completion file simplified and updated for latest changes. - [#7789](https://github.com/rust-lang/cargo/pull/7789) -- Credentials are only loaded when needed, instead of every Cargo command. - [#7774](https://github.com/rust-lang/cargo/pull/7774) - -### Fixed -- Removed `--offline` empty index check, which was a false positive in some - cases. - [#7655](https://github.com/rust-lang/cargo/pull/7655) -- Files and directories starting with a `.` can now be included in a package - by adding it to the `include` list. - [#7680](https://github.com/rust-lang/cargo/pull/7680) -- Fixed `cargo login` removing alternative registry tokens when previous - entries existed in the credentials file. - [#7708](https://github.com/rust-lang/cargo/pull/7708) -- Fixed `cargo vendor` from panicking when used with alternative registries. - [#7718](https://github.com/rust-lang/cargo/pull/7718) -- Fixed incorrect explanation in the fingerprint debug log message. - [#7749](https://github.com/rust-lang/cargo/pull/7749) -- A `[source]` that is defined multiple times will now result in an error. - Previously it was randomly picking a source, which could cause - non-deterministic behavior. - [#7751](https://github.com/rust-lang/cargo/pull/7751) -- `dep_kinds` in `cargo metadata` are now de-duplicated. - [#7756](https://github.com/rust-lang/cargo/pull/7756) -- Fixed packaging where `Cargo.lock` was listed in `.gitignore` in a - subdirectory inside a git repository. Previously it was assuming - `Cargo.lock` was at the root of the repo. - [#7779](https://github.com/rust-lang/cargo/pull/7779) -- Partial file transfer errors will now cause an automatic retry. - [#7788](https://github.com/rust-lang/cargo/pull/7788) -- Linux: Fixed panic if CPU iowait stat decreases. - [#7803](https://github.com/rust-lang/cargo/pull/7803) -- Fixed using the wrong sysroot for detecting host compiler settings when - `--sysroot` is passed in via `RUSTFLAGS`. - [#7798](https://github.com/rust-lang/cargo/pull/7798) - -### Nightly only -- `build-std` now uses `--extern` instead of `--sysroot` to find sysroot - packages. - [#7699](https://github.com/rust-lang/cargo/pull/7699) -- Added `--config` command-line option to set config settings. - [#7649](https://github.com/rust-lang/cargo/pull/7649) -- Added `include` config setting which allows including another config file. 
- [#7649](https://github.com/rust-lang/cargo/pull/7649) -- Profiles in config files now support any named profile. Previously it was - limited to dev/release. - [#7750](https://github.com/rust-lang/cargo/pull/7750) - -## Cargo 1.41 (2020-01-30) -[5da4b4d4...rust-1.41.0](https://github.com/rust-lang/cargo/compare/5da4b4d4...rust-1.41.0) - -### Added -- ๐Ÿ”ฅ Cargo now uses a new `Cargo.lock` file format. This new format should - support easier merges in source control systems. Projects using the old - format will continue to use the old format, only new `Cargo.lock` files will - use the new format. - [#7579](https://github.com/rust-lang/cargo/pull/7579) -- ๐Ÿ”ฅ `cargo install` will now upgrade already installed packages instead of - failing. - [#7560](https://github.com/rust-lang/cargo/pull/7560) -- ๐Ÿ”ฅ Profile overrides have been added. This allows overriding profiles for - individual dependencies or build scripts. See [the - documentation](https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#overrides) - for more. - [#7591](https://github.com/rust-lang/cargo/pull/7591) -- Added new documentation for build scripts. - [#7565](https://github.com/rust-lang/cargo/pull/7565) -- Added documentation for Cargo's JSON output. - [#7595](https://github.com/rust-lang/cargo/pull/7595) -- Significant expansion of config and environment variable documentation. - [#7650](https://github.com/rust-lang/cargo/pull/7650) -- Add back support for `BROWSER` environment variable for `cargo doc --open`. - [#7576](https://github.com/rust-lang/cargo/pull/7576) -- Added `kind` and `platform` for dependencies in `cargo metadata`. - [#7132](https://github.com/rust-lang/cargo/pull/7132) -- The `OUT_DIR` value is now included in the `build-script-executed` JSON message. - [#7622](https://github.com/rust-lang/cargo/pull/7622) - -### Changed -- `cargo doc` will now document private items in binaries by default. - [#7593](https://github.com/rust-lang/cargo/pull/7593) -- Subcommand typo suggestions now include aliases. - [#7486](https://github.com/rust-lang/cargo/pull/7486) -- Tweak how the "already existing..." comment is added to `.gitignore`. - [#7570](https://github.com/rust-lang/cargo/pull/7570) -- Ignore `cargo login` text from copy/paste in token. - [#7588](https://github.com/rust-lang/cargo/pull/7588) -- Windows: Ignore errors for locking files when not supported by the filesystem. - [#7602](https://github.com/rust-lang/cargo/pull/7602) -- Remove `**/*.rs.bk` from `.gitignore`. - [#7647](https://github.com/rust-lang/cargo/pull/7647) - -### Fixed -- Fix unused warnings for some keys in the `build` config section. - [#7575](https://github.com/rust-lang/cargo/pull/7575) -- Linux: Don't panic when parsing `/proc/stat`. - [#7580](https://github.com/rust-lang/cargo/pull/7580) -- Don't show canonical path in `cargo vendor`. - [#7629](https://github.com/rust-lang/cargo/pull/7629) - -### Nightly only - - -## Cargo 1.40 (2019-12-19) -[1c6ec66d...5da4b4d4](https://github.com/rust-lang/cargo/compare/1c6ec66d...5da4b4d4) - -### Added -- Added `http.ssl-version` config option to control the version of TLS, - along with min/max versions. - [#7308](https://github.com/rust-lang/cargo/pull/7308) -- ๐Ÿ”ฅ Compiler warnings are now cached on disk. If a build generates warnings, - re-running the build will now re-display the warnings. 
- [#7450](https://github.com/rust-lang/cargo/pull/7450) -- Added `--filter-platform` option to `cargo metadata` to narrow the nodes - shown in the resolver graph to only packages included for the given target - triple. - [#7376](https://github.com/rust-lang/cargo/pull/7376) - -### Changed -- Cargo's "platform" `cfg` parsing has been extracted into a separate crate - named `cargo-platform`. - [#7375](https://github.com/rust-lang/cargo/pull/7375) -- Dependencies extracted into Cargo's cache no longer preserve mtimes to - reduce syscall overhead. - [#7465](https://github.com/rust-lang/cargo/pull/7465) -- Windows: EXE files no longer include a metadata hash in the filename. - This helps with debuggers correlating the filename with the PDB file. - [#7400](https://github.com/rust-lang/cargo/pull/7400) -- Wasm32: `.wasm` files are no longer treated as an "executable", allowing - `cargo test` and `cargo run` to work properly with the generated `.js` file. - [#7476](https://github.com/rust-lang/cargo/pull/7476) -- crates.io now supports SPDX 3.6 licenses. - [#7481](https://github.com/rust-lang/cargo/pull/7481) -- Improved cyclic dependency error message. - [#7470](https://github.com/rust-lang/cargo/pull/7470) -- Bare `cargo clean` no longer locks the package cache. - [#7502](https://github.com/rust-lang/cargo/pull/7502) -- `cargo publish` now allows dev-dependencies without a version key to be - published. A git or path-only dev-dependency will be removed from the - package manifest before uploading. - [#7333](https://github.com/rust-lang/cargo/pull/7333) -- `--features` and `--no-default-features` in the root of a virtual workspace - will now generate an error instead of being ignored. - [#7507](https://github.com/rust-lang/cargo/pull/7507) -- Generated files (like `Cargo.toml` and `Cargo.lock`) in a package archive - now have their timestamp set to the current time instead of the epoch. - [#7523](https://github.com/rust-lang/cargo/pull/7523) -- The `-Z` flag parser is now more strict, rejecting more invalid syntax. - [#7531](https://github.com/rust-lang/cargo/pull/7531) - -### Fixed -- Fixed an issue where if a package had an `include` field, and `Cargo.lock` - in `.gitignore`, and a binary or example target, and the `Cargo.lock` exists - in the current project, it would fail to publish complaining the - `Cargo.lock` was dirty. - [#7448](https://github.com/rust-lang/cargo/pull/7448) -- Fixed a panic in a particular combination of `[patch]` entries. - [#7452](https://github.com/rust-lang/cargo/pull/7452) -- Windows: Better error message when `cargo test` or `rustc` crashes in an - abnormal way, such as a signal or seg fault. - [#7535](https://github.com/rust-lang/cargo/pull/7535) - -### Nightly only -- The `mtime-on-use` feature may now be enabled via the - `unstable.mtime_on_use` config option. - [#7411](https://github.com/rust-lang/cargo/pull/7411) -- Added support for named profiles. - [#6989](https://github.com/rust-lang/cargo/pull/6989) -- Added `-Zpanic-abort-tests` to allow building and running tests with the - "abort" panic strategy. - [#7460](https://github.com/rust-lang/cargo/pull/7460) -- Changed `build-std` to use `--sysroot`. - [#7421](https://github.com/rust-lang/cargo/pull/7421) -- Various fixes and enhancements to `-Ztimings`. 
- [#7395](https://github.com/rust-lang/cargo/pull/7395) - [#7398](https://github.com/rust-lang/cargo/pull/7398) - [#7397](https://github.com/rust-lang/cargo/pull/7397) - [#7403](https://github.com/rust-lang/cargo/pull/7403) - [#7428](https://github.com/rust-lang/cargo/pull/7428) - [#7429](https://github.com/rust-lang/cargo/pull/7429) -- The profile override syntax has been renamed to - `[profile.dev.package.NAME]`. - [#7504](https://github.com/rust-lang/cargo/pull/7504) -- Fixed warnings for unused profile overrides in a workspace. - [#7536](https://github.com/rust-lang/cargo/pull/7536) - -## Cargo 1.39 (2019-11-07) -[e853aa97...1c6ec66d](https://github.com/rust-lang/cargo/compare/e853aa97...1c6ec66d) - -### Added -- Config files may now use the `.toml` filename extension. - [#7295](https://github.com/rust-lang/cargo/pull/7295) -- The `--workspace` flag has been added as an alias for `--all` to help avoid - confusion about the meaning of "all". - [#7241](https://github.com/rust-lang/cargo/pull/7241) -- The `publish` field has been added to `cargo metadata`. - [#7354](https://github.com/rust-lang/cargo/pull/7354) - -### Changed -- Display more information if parsing the output from `rustc` fails. - [#7236](https://github.com/rust-lang/cargo/pull/7236) -- TOML errors now show the column number. - [#7248](https://github.com/rust-lang/cargo/pull/7248) -- `cargo vendor` no longer deletes files in the `vendor` directory that start - with a `.`. - [#7242](https://github.com/rust-lang/cargo/pull/7242) -- `cargo fetch` will now show manifest warnings. - [#7243](https://github.com/rust-lang/cargo/pull/7243) -- `cargo publish` will now check whether git submodules contain any - uncommitted changes. - [#7245](https://github.com/rust-lang/cargo/pull/7245) -- In a build script, `cargo:rustc-flags` now allows `-l` and `-L` flags - without spaces. - [#7257](https://github.com/rust-lang/cargo/pull/7257) -- When `cargo install` replaces an older version of a package it will now - delete any installed binaries that are no longer present in the newly - installed version. - [#7246](https://github.com/rust-lang/cargo/pull/7246) -- A git dependency may now also specify a `version` key when published. The - `git` value will be stripped from the uploaded crate, matching the behavior - of `path` dependencies. - [#7237](https://github.com/rust-lang/cargo/pull/7237) -- The behavior of workspace default-members has changed. The default-members - now only applies when running Cargo in the root of the workspace. Previously - it would always apply regardless of which directory Cargo is running in. - [#7270](https://github.com/rust-lang/cargo/pull/7270) -- libgit2 updated, pulling in all upstream changes. - [#7275](https://github.com/rust-lang/cargo/pull/7275) -- Bump `home` dependency for locating home directories. - [#7277](https://github.com/rust-lang/cargo/pull/7277) -- zsh completions have been updated. - [#7296](https://github.com/rust-lang/cargo/pull/7296) -- SSL connect errors are now retried. - [#7318](https://github.com/rust-lang/cargo/pull/7318) -- The jobserver has been changed to acquire N tokens (instead of N-1), and - then immediately acquires the extra token. This was changed to accommodate - the `cc` crate on Windows to allow it to release its implicit token. - [#7344](https://github.com/rust-lang/cargo/pull/7344) -- The scheduling algorithm for choosing which crate to build next has been - changed. It now chooses the crate with the greatest number of transitive - crates waiting on it.
Previously it used a maximum topological depth. - [#7390](https://github.com/rust-lang/cargo/pull/7390) -- RUSTFLAGS are no longer incorporated in the metadata and filename hash, - reversing the change from 1.33 that added it. This means that any change to - RUSTFLAGS will cause a recompile, and will not affect symbol munging. - [#7459](https://github.com/rust-lang/cargo/pull/7459) - -### Fixed -- Git dependencies with submodules with shorthand SSH URLs (like - `git@github.com/user/repo.git`) should now work. - [#7238](https://github.com/rust-lang/cargo/pull/7238) -- Handle broken symlinks when creating `.dSYM` symlinks on macOS. - [#7268](https://github.com/rust-lang/cargo/pull/7268) -- Fixed issues with multiple versions of the same crate in a `[patch]` table. - [#7303](https://github.com/rust-lang/cargo/pull/7303) -- Fixed issue with custom target `.json` files where a substring of the name - matches an unsupported crate type (like "bin"). - [#7363](https://github.com/rust-lang/cargo/issues/7363) -- Fixed issues with generating documentation for proc-macro crate types. - [#7159](https://github.com/rust-lang/cargo/pull/7159) -- Fixed hang if Cargo panics within a build thread. - [#7366](https://github.com/rust-lang/cargo/pull/7366) -- Fixed rebuild detection if a `build.rs` script issues different `rerun-if` - directives between builds. Cargo was erroneously causing a rebuild after the - change. - [#7373](https://github.com/rust-lang/cargo/pull/7373) -- Properly handle canonical URLs for `[patch]` table entries, preventing - the patch from working after the first time it is used. - [#7368](https://github.com/rust-lang/cargo/pull/7368) -- Fixed an issue where integration tests were waiting for the package binary - to finish building before starting their own build. They now may build - concurrently. - [#7394](https://github.com/rust-lang/cargo/pull/7394) -- Fixed accidental change in the previous release on how `--features a b` flag - is interpreted, restoring the original behavior where this is interpreted as - `--features a` along with the argument `b` passed to the command. To pass - multiple features, use quotes around the features to pass multiple features - like `--features "a b"`, or use commas, or use multiple `--features` flags. - [#7419](https://github.com/rust-lang/cargo/pull/7419) - -### Nightly only -- Basic support for building the standard library directly from Cargo has been - added. - ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std)) - [#7216](https://github.com/rust-lang/cargo/pull/7216) -- Added `-Ztimings` feature to generate an HTML report on the time spent on - individual compilation steps. This also may output completion steps on the - console and JSON data. - ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#timings)) - [#7311](https://github.com/rust-lang/cargo/pull/7311) -- Added ability to cross-compile doctests. - ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile)) - [#6892](https://github.com/rust-lang/cargo/pull/6892) - -## Cargo 1.38 (2019-09-26) -[4c1fa54d...23ef9a4e](https://github.com/rust-lang/cargo/compare/4c1fa54d...23ef9a4e) - -### Added -- ๐Ÿ”ฅ Cargo build pipelining has been enabled by default to leverage more idle CPU - parallelism during builds. - [#7143](https://github.com/rust-lang/cargo/pull/7143) -- The `--message-format` option to Cargo can now be specified multiple times and - accepts a comma-separated list of values. 
In addition to the previous values - it also now accepts `json-diagnostic-short` and - `json-diagnostic-rendered-ansi` which configures the output coming from rustc - in `json` message mode. - [#7214](https://github.com/rust-lang/cargo/pull/7214) -- Cirrus CI badges are now supported on crates.io. - [#7119](https://github.com/rust-lang/cargo/pull/7119) -- A new format for `Cargo.lock` has been introduced. This new format is - intended to avoid source-control merge conflicts more often, and to - generally make it safer to merge changes. This new format is *not* enabled - at this time, though Cargo will use it if it sees it. At some point in the - future, it is intended that this will become the default. - [#7070](https://github.com/rust-lang/cargo/pull/7070) -- Progress bar support added for FreeBSD. - [#7222](https://github.com/rust-lang/cargo/pull/7222) - -### Changed -- The `-q` flag will no longer suppress the root error message for an error - from Cargo itself. - [#7116](https://github.com/rust-lang/cargo/pull/7116) -- The Cargo Book is now published with mdbook 0.3 providing a number of - formatting fixes and improvements. - [#7140](https://github.com/rust-lang/cargo/pull/7140) -- The `--features` command-line flag can now be specified multiple times. - The list of features from all the flags are joined together. - [#7084](https://github.com/rust-lang/cargo/pull/7084) -- Package include/exclude glob-vs-gitignore warnings have been removed. - Packages may now use gitignore-style matching without producing any - warnings. - [#7170](https://github.com/rust-lang/cargo/pull/7170) -- Cargo now shows the command and output when parsing `rustc` output fails - when querying `rustc` for information like `cfg` values. - [#7185](https://github.com/rust-lang/cargo/pull/7185) -- `cargo package`/`cargo publish` now allows a symbolic link to a git - submodule to include that submodule. - [#6817](https://github.com/rust-lang/cargo/pull/6817) -- Improved the error message when a version requirement does not - match any versions, but there are pre-release versions available. - [#7191](https://github.com/rust-lang/cargo/pull/7191) - -### Fixed -- Fixed using the wrong directory when updating git repositories when using - the `git-fetch-with-cli` config option, and the `GIT_DIR` environment - variable is set. This may happen when running cargo from git callbacks. - [#7082](https://github.com/rust-lang/cargo/pull/7082) -- Fixed dep-info files being overwritten for targets that have separate debug - outputs. For example, binaries on `-apple-` targets with `.dSYM` directories - would overwrite the `.d` file. - [#7057](https://github.com/rust-lang/cargo/pull/7057) -- Fix `[patch]` table not preserving "one major version per source" rule. - [#7118](https://github.com/rust-lang/cargo/pull/7118) -- Ignore `--remap-path-prefix` flags for the metadata hash in the `cargo - rustc` command. This was causing the remap settings to inadvertently affect - symbol names. - [#7134](https://github.com/rust-lang/cargo/pull/7134) -- Fixed cycle detection in `[patch]` dependencies. - [#7174](https://github.com/rust-lang/cargo/pull/7174) -- Fixed `cargo new` leaving behind a symlink on Windows when `core.symlinks` - git config is true. Also adds a number of fixes and updates from upstream - libgit2. - [#7176](https://github.com/rust-lang/cargo/pull/7176) -- macOS: Fixed setting the flag to mark the `target` directory to be excluded - from backups. 
- [#7192](https://github.com/rust-lang/cargo/pull/7192) -- Fixed `cargo fix` panicking under some situations involving multi-byte - characters. - [#7221](https://github.com/rust-lang/cargo/pull/7221) - -### Nightly only -- Added `cargo fix --clippy` which will apply machine-applicable fixes from - Clippy. - [#7069](https://github.com/rust-lang/cargo/pull/7069) -- Added `-Z binary-dep-depinfo` flag to add change tracking for binary - dependencies like the standard library. - [#7137](https://github.com/rust-lang/cargo/pull/7137) - [#7219](https://github.com/rust-lang/cargo/pull/7219) -- `cargo clippy-preview` will always run, even if no changes have been made. - [#7157](https://github.com/rust-lang/cargo/pull/7157) -- Fixed exponential blowup when using `CARGO_BUILD_PIPELINING`. - [#7062](https://github.com/rust-lang/cargo/pull/7062) -- Fixed passing args to clippy in `cargo clippy-preview`. - [#7162](https://github.com/rust-lang/cargo/pull/7162) - -## Cargo 1.37 (2019-08-15) -[c4fcfb72...9edd0891](https://github.com/rust-lang/cargo/compare/c4fcfb72...9edd0891) - -### Added -- Added `doctest` field to `cargo metadata` to determine if a target's - documentation is tested. - [#6953](https://github.com/rust-lang/cargo/pull/6953) - [#6965](https://github.com/rust-lang/cargo/pull/6965) -- ๐Ÿ”ฅ The [`cargo - vendor`](https://doc.rust-lang.org/nightly/cargo/commands/cargo-vendor.html) - command is now built-in to Cargo. This command may be used to create a local - copy of the sources of all dependencies. - [#6869](https://github.com/rust-lang/cargo/pull/6869) -- ๐Ÿ”ฅ The "publish lockfile" feature is now stable. This feature will - automatically include the `Cargo.lock` file when a package is published if - it contains a binary executable target. By default, Cargo will ignore - `Cargo.lock` when installing a package. To force Cargo to use the - `Cargo.lock` file included in the published package, use `cargo install - --locked`. This may be useful to ensure that `cargo install` consistently - reproduces the same result. It may also be useful when a semver-incompatible - change is accidentally published to a dependency, providing a way to fall - back to a version that is known to work. - [#7026](https://github.com/rust-lang/cargo/pull/7026) -- ๐Ÿ”ฅ The `default-run` feature has been stabilized. This feature allows you to - specify which binary executable to run by default with `cargo run` when a - package includes multiple binaries. Set the `default-run` key in the - `[package]` table in `Cargo.toml` to the name of the binary to use by - default. - [#7056](https://github.com/rust-lang/cargo/pull/7056) - -### Changed -- `cargo package` now verifies that build scripts do not create empty - directories. - [#6973](https://github.com/rust-lang/cargo/pull/6973) -- A warning is now issued if `cargo doc` generates duplicate outputs, which - causes files to be randomly stomped on. This may happen for a variety of - reasons (renamed dependencies, multiple versions of the same package, - packages with renamed libraries, etc.). This is a known bug, which needs - more work to handle correctly. - [#6998](https://github.com/rust-lang/cargo/pull/6998) -- Enabling a dependency's feature with `--features foo/bar` will no longer - compile the current crate with the `foo` feature if `foo` is not an optional - dependency. - [#7010](https://github.com/rust-lang/cargo/pull/7010) -- If `--remap-path-prefix` is passed via RUSTFLAGS, it will no longer affect - the filename metadata hash. 
- [#6966](https://github.com/rust-lang/cargo/pull/6966) -- libgit2 has been updated to 0.28.2, which Cargo uses to access git - repositories. This brings in hundreds of changes and fixes since it was last - updated in November. - [#7018](https://github.com/rust-lang/cargo/pull/7018) -- Cargo now supports absolute paths in the dep-info files generated by rustc. - This is laying the groundwork for [tracking - binaries](https://github.com/rust-lang/rust/pull/61727), such as libstd, for - rebuild detection. (Note: this contains a known bug.) - [#7030](https://github.com/rust-lang/cargo/pull/7030) - -### Fixed -- Fixed how zsh completions fetch the list of commands. - [#6956](https://github.com/rust-lang/cargo/pull/6956) -- "+ debuginfo" is no longer printed in the build summary when `debug` is set - to 0. - [#6971](https://github.com/rust-lang/cargo/pull/6971) -- Fixed `cargo doc` with an example configured with `doc = true` to document - correctly. - [#7023](https://github.com/rust-lang/cargo/pull/7023) -- Don't fail if a read-only lock cannot be acquired in CARGO_HOME. This helps - when CARGO_HOME doesn't exist, but `--locked` is used which means CARGO_HOME - is not needed. - [#7149](https://github.com/rust-lang/cargo/pull/7149) -- Reverted a change in 1.35 which released jobserver tokens when Cargo blocked - on a lock file. It caused a deadlock in some situations. - [#7204](https://github.com/rust-lang/cargo/pull/7204) - -### Nightly only -- Added [compiler message - caching](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#cache-messages). - The `-Z cache-messages` flag makes cargo cache the compiler output so that - future runs can redisplay previous warnings. - [#6933](https://github.com/rust-lang/cargo/pull/6933) -- `-Z mtime-on-use` no longer touches intermediate artifacts. - [#7050](https://github.com/rust-lang/cargo/pull/7050) - -## Cargo 1.36 (2019-07-04) -[6f3e9c36...c4fcfb72](https://github.com/rust-lang/cargo/compare/6f3e9c36...c4fcfb72) - -### Added -- Added more detailed documentation on target auto-discovery. - [#6898](https://github.com/rust-lang/cargo/pull/6898) -- ๐Ÿ”ฅ Stabilize the `--offline` flag which allows using cargo without a network - connection. - [#6934](https://github.com/rust-lang/cargo/pull/6934) - [#6871](https://github.com/rust-lang/cargo/pull/6871) - -### Changed -- `publish = ["crates-io"]` may be added to the manifest to restrict - publishing to crates.io only. - [#6838](https://github.com/rust-lang/cargo/pull/6838) -- macOS: Only include the default paths if `DYLD_FALLBACK_LIBRARY_PATH` is not - set. Also, remove `/lib` from the default set. - [#6856](https://github.com/rust-lang/cargo/pull/6856) -- `cargo publish` will now exit early if the login token is not available. - [#6854](https://github.com/rust-lang/cargo/pull/6854) -- HTTP/2 stream errors are now considered "spurious" and will cause a retry. - [#6861](https://github.com/rust-lang/cargo/pull/6861) -- Setting a feature on a dependency where that feature points to a *required* - dependency is now an error. Previously it was a warning. - [#6860](https://github.com/rust-lang/cargo/pull/6860) -- The `registry.index` config value now supports relative `file:` URLs. - [#6873](https://github.com/rust-lang/cargo/pull/6873) -- macOS: The `.dSYM` directory is now symbolically linked next to example - binaries without the metadata hash so that debuggers can find it. 
- [#6891](https://github.com/rust-lang/cargo/pull/6891) -- The default `Cargo.toml` template for new projects now includes a comment - providing a link to the documentation. - [#6881](https://github.com/rust-lang/cargo/pull/6881) -- Some improvements to the wording of the crate download summary. - [#6916](https://github.com/rust-lang/cargo/pull/6916) - [#6920](https://github.com/rust-lang/cargo/pull/6920) -- ✨ Changed `RUST_LOG` environment variable to `CARGO_LOG` so that user code - that uses the `log` crate will not display cargo's debug output. - [#6918](https://github.com/rust-lang/cargo/pull/6918) -- `Cargo.toml` is now always included when packaging, even if it is not listed - in `package.include`. - [#6925](https://github.com/rust-lang/cargo/pull/6925) -- Package include/exclude values now use gitignore patterns instead of glob - patterns. [#6924](https://github.com/rust-lang/cargo/pull/6924) -- Provide a better error message when crates.io times out. Also improve error - messages with other HTTP response codes. - [#6936](https://github.com/rust-lang/cargo/pull/6936) - -### Performance -- Resolver performance improvements for some cases. - [#6853](https://github.com/rust-lang/cargo/pull/6853) -- Optimized how cargo reads the index JSON files by caching the results. - [#6880](https://github.com/rust-lang/cargo/pull/6880) - [#6912](https://github.com/rust-lang/cargo/pull/6912) - [#6940](https://github.com/rust-lang/cargo/pull/6940) -- Various performance improvements. - [#6867](https://github.com/rust-lang/cargo/pull/6867) - -### Fixed -- More carefully track the on-disk fingerprint information for dependencies. - This can help in some rare cases where the build is interrupted and - restarted. [#6832](https://github.com/rust-lang/cargo/pull/6832) -- `cargo run` now correctly passes non-UTF8 arguments to the child process. - [#6849](https://github.com/rust-lang/cargo/pull/6849) -- Fixed bash completion to run on bash 3.2, the stock version in macOS. - [#6905](https://github.com/rust-lang/cargo/pull/6905) -- Various fixes and improvements to zsh completion. - [#6926](https://github.com/rust-lang/cargo/pull/6926) - [#6929](https://github.com/rust-lang/cargo/pull/6929) -- Fix `cargo update` ignoring `-p` arguments if the `Cargo.lock` file was - missing. - [#6904](https://github.com/rust-lang/cargo/pull/6904) - -### Nightly only -- Added [`-Z install-upgrade` - feature](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#install-upgrade) - to track details about installed crates and to update them if they are - out-of-date. [#6798](https://github.com/rust-lang/cargo/pull/6798) -- Added the [`public-dependency` - feature](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#public-dependency) - which allows tracking public versus private dependencies. - [#6772](https://github.com/rust-lang/cargo/pull/6772) -- Added build pipelining via the `build.pipelining` config - option (`CARGO_BUILD_PIPELINING` env var). - [#6883](https://github.com/rust-lang/cargo/pull/6883) -- The `publish-lockfile` feature has had some significant changes. The default - is now `true`, and the `Cargo.lock` will always be published for binary crates. - The `Cargo.lock` is now regenerated during publishing. `cargo install` now - ignores the `Cargo.lock` file by default, and requires `--locked` to use the - lock file. Warnings have been added if yanked dependencies are detected.
- [#6840](https://github.com/rust-lang/cargo/pull/6840) - -## Cargo 1.35 (2019-05-23) -[6789d8a0...6f3e9c36](https://github.com/rust-lang/cargo/compare/6789d8a0...6f3e9c36) - -### Added -- Added the `rustc-cdylib-link-arg` key for build scripts to specify linker - arguments for cdylib crates. - [#6298](https://github.com/rust-lang/cargo/pull/6298) - -### Changed -- When passing a test filter, such as `cargo test foo`, don't build examples - (unless they set `test = true`). - [#6683](https://github.com/rust-lang/cargo/pull/6683) -- Forward the `--quiet` flag from `cargo test` to the libtest harness so that - tests are actually quiet. - [#6358](https://github.com/rust-lang/cargo/pull/6358) -- The verification step in `cargo package` that checks if any files are - modified is now stricter. It uses a hash of the contents instead of checking - filesystem mtimes. It also checks *all* files in the package. - [#6740](https://github.com/rust-lang/cargo/pull/6740) -- Jobserver tokens are now released whenever Cargo blocks on a file lock. - [#6748](https://github.com/rust-lang/cargo/pull/6748) -- Issue a warning for a previous bug in the TOML parser that allowed multiple - table headers with the same name. - [#6761](https://github.com/rust-lang/cargo/pull/6761) -- Removed the `CARGO_PKG_*` environment variables from the metadata hash and - added them to the fingerprint instead. This means that when these values - change, stale artifacts are not left behind. Also added the "repository" - value to the fingerprint. - [#6785](https://github.com/rust-lang/cargo/pull/6785) -- `cargo metadata` no longer shows a `null` field for a dependency without a - library in `resolve.nodes.deps`. The dependency is no longer shown. - [#6534](https://github.com/rust-lang/cargo/pull/6534) -- `cargo new` will no longer include an email address in the `authors` field - if it is set to the empty string. - [#6802](https://github.com/rust-lang/cargo/pull/6802) -- `cargo doc --open` now works when documenting multiple packages. - [#6803](https://github.com/rust-lang/cargo/pull/6803) -- `cargo install --path P` now loads the `.cargo/config` file from the - directory P. [#6805](https://github.com/rust-lang/cargo/pull/6805) -- Using semver metadata in a version requirement (such as `1.0.0+1234`) now - issues a warning that it is ignored. - [#6806](https://github.com/rust-lang/cargo/pull/6806) -- `cargo install` now rejects certain combinations of flags where some flags - would have been ignored. - [#6801](https://github.com/rust-lang/cargo/pull/6801) -- Resolver performance improvements for some cases. - [#6776](https://github.com/rust-lang/cargo/pull/6776) - -### Fixed -- Fixed running separate commands (such as `cargo build` then `cargo test`) - where the second command could use stale results from a build script. - [#6720](https://github.com/rust-lang/cargo/pull/6720) -- Fixed `cargo fix` not working properly if a `.gitignore` file that matched - the root package directory. - [#6767](https://github.com/rust-lang/cargo/pull/6767) -- Fixed accidentally compiling a lib multiple times if `panic=unwind` was set - in a profile. [#6781](https://github.com/rust-lang/cargo/pull/6781) -- Paths to JSON files in `build.target` config value are now canonicalized to - fix building dependencies. - [#6778](https://github.com/rust-lang/cargo/pull/6778) -- Fixed re-running a build script if its compilation was interrupted (such as - if it is killed). 
[#6782](https://github.com/rust-lang/cargo/pull/6782) -- Fixed `cargo new` initializing a fossil repo. - [#6792](https://github.com/rust-lang/cargo/pull/6792) -- Fixed supporting updating a git repo that has a force push when using the - `git-fetch-with-cli` feature. `git-fetch-with-cli` also shows more error - information now when it fails. - [#6800](https://github.com/rust-lang/cargo/pull/6800) -- `--example` binaries built for the WASM target are fixed to no longer - include a metadata hash in the filename, and are correctly emitted in the - `compiler-artifact` JSON message. - [#6812](https://github.com/rust-lang/cargo/pull/6812) - -### Nightly only -- `cargo clippy-preview` is now a built-in cargo command. - [#6759](https://github.com/rust-lang/cargo/pull/6759) -- The `build-override` profile setting now includes proc-macros and their - dependencies. - [#6811](https://github.com/rust-lang/cargo/pull/6811) -- Optional and target dependencies now work better with `-Z offline`. - [#6814](https://github.com/rust-lang/cargo/pull/6814) - -## Cargo 1.34 (2019-04-11) -[f099fe94...6789d8a0](https://github.com/rust-lang/cargo/compare/f099fe94...6789d8a0) - -### Added -- ๐Ÿ”ฅ Stabilized support for [alternate - registries](https://doc.rust-lang.org/1.34.0/cargo/reference/registries.html). - [#6654](https://github.com/rust-lang/cargo/pull/6654) -- Added documentation on using builds.sr.ht Continuous Integration with Cargo. - [#6565](https://github.com/rust-lang/cargo/pull/6565) -- `Cargo.lock` now includes a comment at the top that it is `@generated`. - [#6548](https://github.com/rust-lang/cargo/pull/6548) -- Azure DevOps badges are now supported. - [#6264](https://github.com/rust-lang/cargo/pull/6264) -- Added a warning if `--exclude` flag specifies an unknown package. - [#6679](https://github.com/rust-lang/cargo/pull/6679) - -### Changed -- `cargo test --doc --no-run` doesn't do anything, so it now displays an error - to that effect. [#6628](https://github.com/rust-lang/cargo/pull/6628) -- Various updates to bash completion: add missing options and commands, - support libtest completions, use rustup for `--target` completion, fallback - to filename completion, fix editing the command line. - [#6644](https://github.com/rust-lang/cargo/pull/6644) -- Publishing a crate with a `[patch]` section no longer generates an error. - The `[patch]` section is removed from the manifest before publishing. - [#6535](https://github.com/rust-lang/cargo/pull/6535) -- `build.incremental = true` config value is now treated the same as - `CARGO_INCREMENTAL=1`, previously it was ignored. - [#6688](https://github.com/rust-lang/cargo/pull/6688) -- Errors from a registry are now always displayed regardless of the HTTP - response code. [#6771](https://github.com/rust-lang/cargo/pull/6771) - -### Fixed -- Fixed bash completion for `cargo run --example`. - [#6578](https://github.com/rust-lang/cargo/pull/6578) -- Fixed a race condition when using a *local* registry and running multiple - cargo commands at the same time that build the same crate. - [#6591](https://github.com/rust-lang/cargo/pull/6591) -- Fixed some flickering and excessive updates of the progress bar. - [#6615](https://github.com/rust-lang/cargo/pull/6615) -- Fixed a hang when using a git credential helper that returns incorrect - credentials. [#6681](https://github.com/rust-lang/cargo/pull/6681) -- Fixed resolving yanked crates with a local registry. 
- [#6750](https://github.com/rust-lang/cargo/pull/6750) - -### Nightly only -- Added `-Z mtime-on-use` flag to cause the mtime to be updated on the - filesystem when a crate is used. This is intended to be able to track stale - artifacts in the future for cleaning up unused files. - [#6477](https://github.com/rust-lang/cargo/pull/6477) - [#6573](https://github.com/rust-lang/cargo/pull/6573) -- Added experimental `-Z dual-proc-macros` to build proc macros for both the - host and the target. - [#6547](https://github.com/rust-lang/cargo/pull/6547) - -## Cargo 1.33 (2019-02-28) -[8610973a...f099fe94](https://github.com/rust-lang/cargo/compare/8610973a...f099fe94) - -### Added -- `compiler-artifact` JSON messages now include an `"executable"` key which - includes the path to the executable that was built. - [#6363](https://github.com/rust-lang/cargo/pull/6363) -- The man pages have been rewritten, and are now published with the web - documentation. [#6405](https://github.com/rust-lang/cargo/pull/6405) -- `cargo login` now displays a confirmation after saving the token. - [#6466](https://github.com/rust-lang/cargo/pull/6466) -- A warning is now emitted if a `[patch]` entry does not match any package. - [#6470](https://github.com/rust-lang/cargo/pull/6470) -- `cargo metadata` now includes the `links` key for a package. - [#6480](https://github.com/rust-lang/cargo/pull/6480) -- "Very verbose" output with `-vv` now displays the environment variables that - cargo sets when it runs a process. - [#6492](https://github.com/rust-lang/cargo/pull/6492) -- `--example`, `--bin`, `--bench`, or `--test` without an argument now lists - the available targets for those options. - [#6505](https://github.com/rust-lang/cargo/pull/6505) -- Windows: If a process fails with an extended status exit code, a - human-readable name for the code is now displayed. - [#6532](https://github.com/rust-lang/cargo/pull/6532) -- Added `--features`, `--no-default-features`, and `--all-features` flags to - the `cargo package` and `cargo publish` commands to use the given features - when verifying the package. - [#6453](https://github.com/rust-lang/cargo/pull/6453) - -### Changed -- If `cargo fix` fails to compile the fixed code, the rustc errors are now - displayed on the console. - [#6419](https://github.com/rust-lang/cargo/pull/6419) -- Hide the `--host` flag from `cargo login`, it is unused. - [#6466](https://github.com/rust-lang/cargo/pull/6466) -- Build script fingerprints now include the rustc version. - [#6473](https://github.com/rust-lang/cargo/pull/6473) -- macOS: Switched to setting `DYLD_FALLBACK_LIBRARY_PATH` instead of - `DYLD_LIBRARY_PATH`. [#6355](https://github.com/rust-lang/cargo/pull/6355) -- `RUSTFLAGS` is now included in the metadata hash, meaning that changing - the flags will not overwrite previously built files. - [#6503](https://github.com/rust-lang/cargo/pull/6503) -- When updating the crate graph, unrelated yanked crates were erroneously - removed. They are now kept at their original version if possible. This was - causing unrelated packages to be downgraded during `cargo update -p - somecrate`. [#5702](https://github.com/rust-lang/cargo/issues/5702) -- TOML files now support the [0.5 TOML - syntax](https://github.com/toml-lang/toml/blob/master/CHANGELOG.md#050--2018-07-11). - -### Fixed -- `cargo fix` will now ignore suggestions that modify multiple files. 
- [#6402](https://github.com/rust-lang/cargo/pull/6402) -- `cargo fix` will now only fix one target at a time, to deal with targets - which share the same source files. - [#6434](https://github.com/rust-lang/cargo/pull/6434) -- Fixed bash completion showing the list of cargo commands. - [#6461](https://github.com/rust-lang/cargo/issues/6461) -- `cargo init` will now avoid creating duplicate entries in `.gitignore` - files. [#6521](https://github.com/rust-lang/cargo/pull/6521) -- Builds now attempt to detect if a file is modified in the middle of a - compilation, allowing you to build again and pick up the new changes. This - is done by keeping track of when the compilation *starts* not when it - finishes. Also, [#5919](https://github.com/rust-lang/cargo/pull/5919) was - reverted, meaning that cargo does *not* treat equal filesystem mtimes as - requiring a rebuild. [#6484](https://github.com/rust-lang/cargo/pull/6484) - -### Nightly only -- Allow using registry *names* in `[patch]` tables instead of just URLs. - [#6456](https://github.com/rust-lang/cargo/pull/6456) -- `cargo metadata` added the `registry` key for dependencies. - [#6500](https://github.com/rust-lang/cargo/pull/6500) -- Registry names are now restricted to the same style as - package names (alphanumeric, `-` and `_` characters). - [#6469](https://github.com/rust-lang/cargo/pull/6469) -- `cargo login` now displays the `/me` URL from the registry config. - [#6466](https://github.com/rust-lang/cargo/pull/6466) -- `cargo login --registry=NAME` now supports interactive input for the token. - [#6466](https://github.com/rust-lang/cargo/pull/6466) -- Registries may now elide the `api` key from `config.json` to indicate they - do not support API access. - [#6466](https://github.com/rust-lang/cargo/pull/6466) -- Fixed panic when using `--message-format=json` with metabuild. - [#6432](https://github.com/rust-lang/cargo/pull/6432) -- Fixed detection of publishing to crates.io when using alternate registries. - [#6525](https://github.com/rust-lang/cargo/pull/6525) - -## Cargo 1.32 (2019-01-17) -[339d9f9c...8610973a](https://github.com/rust-lang/cargo/compare/339d9f9c...8610973a) - -### Added -- Registries may now display warnings after a successful publish. - [#6303](https://github.com/rust-lang/cargo/pull/6303) -- Added a [glossary](https://doc.rust-lang.org/cargo/appendix/glossary.html) - to the documentation. [#6321](https://github.com/rust-lang/cargo/pull/6321) -- Added the alias `c` for `cargo check`. - [#6218](https://github.com/rust-lang/cargo/pull/6218) - -### Changed -- ๐Ÿ”ฅ HTTP/2 multiplexing is now enabled by default. The `http.multiplexing` - config value may be used to disable it. - [#6271](https://github.com/rust-lang/cargo/pull/6271) -- Use ANSI escape sequences to clear lines instead of spaces. - [#6233](https://github.com/rust-lang/cargo/pull/6233) -- Disable git templates when checking out git dependencies, which can cause - problems. [#6252](https://github.com/rust-lang/cargo/pull/6252) -- Include the `--update-head-ok` git flag when using the - `net.git-fetch-with-cli` option. This can help prevent failures when - fetching some repositories. - [#6250](https://github.com/rust-lang/cargo/pull/6250) -- When extracting a crate during the verification step of `cargo package`, the - filesystem mtimes are no longer set, which was failing on some rare - filesystems. [#6257](https://github.com/rust-lang/cargo/pull/6257) -- `crate-type = ["proc-macro"]` is now treated the same as `proc-macro = true` - in `Cargo.toml`. 
[#6256](https://github.com/rust-lang/cargo/pull/6256) -- An error is raised if `dependencies`, `features`, `target`, or `badges` is - set in a virtual workspace. Warnings are displayed if `replace` or `patch` - is used in a workspace member. - [#6276](https://github.com/rust-lang/cargo/pull/6276) -- Improved performance of the resolver in some cases. - [#6283](https://github.com/rust-lang/cargo/pull/6283) - [#6366](https://github.com/rust-lang/cargo/pull/6366) -- `.rmeta` files are no longer hard-linked into the base target directory - (`target/debug`). [#6292](https://github.com/rust-lang/cargo/pull/6292) -- A warning is issued if multiple targets are built with the same output - filenames. [#6308](https://github.com/rust-lang/cargo/pull/6308) -- When using `cargo build` (without `--release`) benchmarks are now built - using the "test" profile instead of "bench". This makes it easier to debug - benchmarks, and avoids confusing behavior. - [#6309](https://github.com/rust-lang/cargo/pull/6309) -- User aliases may now override built-in aliases (`b`, `r`, `t`, and `c`). - [#6259](https://github.com/rust-lang/cargo/pull/6259) -- Setting `autobins=false` now disables auto-discovery of inferred targets. - [#6329](https://github.com/rust-lang/cargo/pull/6329) -- `cargo verify-project` will now fail on stable if the project uses unstable - features. [#6326](https://github.com/rust-lang/cargo/pull/6326) -- Platform targets with an internal `.` within the name are now allowed. - [#6255](https://github.com/rust-lang/cargo/pull/6255) -- `cargo clean --release` now only deletes the release directory. - [#6349](https://github.com/rust-lang/cargo/pull/6349) - -### Fixed -- Avoid adding extra angle brackets in email address for `cargo new`. - [#6243](https://github.com/rust-lang/cargo/pull/6243) -- The progress bar is disabled if the CI environment variable is set. - [#6281](https://github.com/rust-lang/cargo/pull/6281) -- Avoid retaining all rustc output in memory. - [#6289](https://github.com/rust-lang/cargo/pull/6289) -- If JSON parsing fails, and rustc exits nonzero, don't lose the parse failure - message. [#6290](https://github.com/rust-lang/cargo/pull/6290) -- Fixed renaming a project directory with build scripts. - [#6328](https://github.com/rust-lang/cargo/pull/6328) -- Fixed `cargo run --example NAME` to work correctly if the example sets - `crate_type = ["bin"]`. - [#6330](https://github.com/rust-lang/cargo/pull/6330) -- Fixed issue with `cargo package` git discovery being too aggressive. The - `--allow-dirty` now completely disables the git repo checks. - [#6280](https://github.com/rust-lang/cargo/pull/6280) -- Fixed build change tracking for `[patch]` deps which resulted in `cargo - build` rebuilding when it shouldn't. - [#6493](https://github.com/rust-lang/cargo/pull/6493) - -### Nightly only -- Allow usernames in registry URLs. - [#6242](https://github.com/rust-lang/cargo/pull/6242) -- Added `"compile_mode"` key to the build-plan JSON structure to be able to - distinguish running a custom build script versus compiling the build script. - [#6331](https://github.com/rust-lang/cargo/pull/6331) -- `--out-dir` no longer copies over build scripts. - [#6300](https://github.com/rust-lang/cargo/pull/6300) - -## Cargo 1.31 (2018-12-06) -[36d96825...339d9f9c](https://github.com/rust-lang/cargo/compare/36d96825...339d9f9c) - -### Added -- ๐Ÿ”ฅ Stabilized support for the 2018 edition. 
- [#5984](https://github.com/rust-lang/cargo/pull/5984) - [#5989](https://github.com/rust-lang/cargo/pull/5989) -- ๐Ÿ”ฅ Added the ability to [rename - dependencies](https://doc.rust-lang.org/1.31.0/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml) - in Cargo.toml. [#6319](https://github.com/rust-lang/cargo/pull/6319) -- ๐Ÿ”ฅ Added support for HTTP/2 pipelining and multiplexing. Set the - `http.multiplexing` config value to enable. - [#6005](https://github.com/rust-lang/cargo/pull/6005) -- Added `http.debug` configuration value to debug HTTP connections. Use - `CARGO_HTTP_DEBUG=true RUST_LOG=cargo::ops::registry cargo build` to display - the debug information. [#6166](https://github.com/rust-lang/cargo/pull/6166) -- `CARGO_PKG_REPOSITORY` environment variable is set with the repository value - from `Cargo.toml` when building . - [#6096](https://github.com/rust-lang/cargo/pull/6096) - -### Changed -- `cargo test --doc` now rejects other flags instead of ignoring them. - [#6037](https://github.com/rust-lang/cargo/pull/6037) -- `cargo install` ignores `~/.cargo/config`. - [#6026](https://github.com/rust-lang/cargo/pull/6026) -- `cargo version --verbose` is now the same as `cargo -vV`. - [#6076](https://github.com/rust-lang/cargo/pull/6076) -- Comments at the top of `Cargo.lock` are now preserved. - [#6181](https://github.com/rust-lang/cargo/pull/6181) -- When building in "very verbose" mode (`cargo build -vv`), build script - output is prefixed with the package name and version, such as `[foo 0.0.1]`. - [#6164](https://github.com/rust-lang/cargo/pull/6164) -- If `cargo fix --broken-code` fails to compile after fixes have been applied, - the files are no longer reverted and are left in their broken state. - [#6316](https://github.com/rust-lang/cargo/pull/6316) - -### Fixed -- Windows: Pass Ctrl-C to the process with `cargo run`. - [#6004](https://github.com/rust-lang/cargo/pull/6004) -- macOS: Fix bash completion. - [#6038](https://github.com/rust-lang/cargo/pull/6038) -- Support arbitrary toolchain names when completing `+toolchain` in bash - completion. [#6038](https://github.com/rust-lang/cargo/pull/6038) -- Fixed edge cases in the resolver, when backtracking on failed dependencies. - [#5988](https://github.com/rust-lang/cargo/pull/5988) -- Fixed `cargo test --all-targets` running lib tests three times. - [#6039](https://github.com/rust-lang/cargo/pull/6039) -- Fixed publishing renamed dependencies to crates.io. - [#5993](https://github.com/rust-lang/cargo/pull/5993) -- Fixed `cargo install` on a git repo with multiple binaries. - [#6060](https://github.com/rust-lang/cargo/pull/6060) -- Fixed deeply nested JSON emitted by rustc being lost. - [#6081](https://github.com/rust-lang/cargo/pull/6081) -- Windows: Fix locking msys terminals to 60 characters. - [#6122](https://github.com/rust-lang/cargo/pull/6122) -- Fixed renamed dependencies with dashes. - [#6140](https://github.com/rust-lang/cargo/pull/6140) -- Fixed linking against the wrong dylib when the dylib existed in both - `target/debug` and `target/debug/deps`. - [#6167](https://github.com/rust-lang/cargo/pull/6167) -- Fixed some unnecessary recompiles when `panic=abort` is used. - [#6170](https://github.com/rust-lang/cargo/pull/6170) - -### Nightly only -- Added `--registry` flag to `cargo install`. - [#6128](https://github.com/rust-lang/cargo/pull/6128) -- Added `registry.default` configuration value to specify the - default registry to use if `--registry` flag is not passed. 
- [#6135](https://github.com/rust-lang/cargo/pull/6135) -- Added `--registry` flag to `cargo new` and `cargo init`. - [#6135](https://github.com/rust-lang/cargo/pull/6135) - -## Cargo 1.30 (2018-10-25) -[524a578d...36d96825](https://github.com/rust-lang/cargo/compare/524a578d...36d96825) - -### Added -- ๐Ÿ”ฅ Added an animated progress bar shows progress during building. - [#5995](https://github.com/rust-lang/cargo/pull/5995/) -- Added `resolve.nodes.deps` key to `cargo metadata`, which includes more - information about resolved dependencies, and properly handles renamed - dependencies. [#5871](https://github.com/rust-lang/cargo/pull/5871) -- When creating a package, provide more detail with `-v` when failing to - discover if files are dirty in a git repository. Also fix a problem with - discovery on Windows. [#5858](https://github.com/rust-lang/cargo/pull/5858) -- Filters like `--bin`, `--test`, `--example`, `--bench`, or `--lib` can be - used in a workspace without selecting a specific package. - [#5873](https://github.com/rust-lang/cargo/pull/5873) -- `cargo run` can be used in a workspace without selecting a specific package. - [#5877](https://github.com/rust-lang/cargo/pull/5877) -- `cargo doc --message-format=json` now outputs JSON messages from rustdoc. - [#5878](https://github.com/rust-lang/cargo/pull/5878) -- Added `--message-format=short` to show one-line messages. - [#5879](https://github.com/rust-lang/cargo/pull/5879) -- Added `.cargo_vcs_info.json` file to `.crate` packages that captures the - current git hash. [#5886](https://github.com/rust-lang/cargo/pull/5886) -- Added `net.git-fetch-with-cli` configuration option to use the `git` - executable to fetch repositories instead of using the built-in libgit2 - library. [#5914](https://github.com/rust-lang/cargo/pull/5914) -- Added `required-features` to `cargo metadata`. - [#5902](https://github.com/rust-lang/cargo/pull/5902) -- `cargo uninstall` within a package will now uninstall that package. - [#5927](https://github.com/rust-lang/cargo/pull/5927) -- Added `--allow-staged` flag to `cargo fix` to allow it to run if files are - staged in git. [#5943](https://github.com/rust-lang/cargo/pull/5943) -- Added `net.low-speed-limit` config value, and also honor `net.timeout` for - http operations. [#5957](https://github.com/rust-lang/cargo/pull/5957) -- Added `--edition` flag to `cargo new`. - [#5984](https://github.com/rust-lang/cargo/pull/5984) -- Temporarily stabilized 2018 edition support for the duration of the beta. - [#5984](https://github.com/rust-lang/cargo/pull/5984) - [#5989](https://github.com/rust-lang/cargo/pull/5989) -- Added support for `target.'cfg(โ€ฆ)'.runner` config value to specify the - run/test/bench runner for targets that use config expressions. - [#5959](https://github.com/rust-lang/cargo/pull/5959) - -### Changed -- Windows: `cargo run` will not kill child processes when the main process - exits. [#5887](https://github.com/rust-lang/cargo/pull/5887) -- Switched to the `opener` crate to open a web browser with `cargo doc - --open`. This should more reliably select the system-preferred browser on - all platforms. [#5888](https://github.com/rust-lang/cargo/pull/5888) -- Equal file mtimes now cause a target to be rebuilt. Previously only if files - were strictly *newer* than the last build would it cause a rebuild. - [#5919](https://github.com/rust-lang/cargo/pull/5919) -- Ignore `build.target` config value when running `cargo install`. 
- [#5874](https://github.com/rust-lang/cargo/pull/5874) -- Ignore `RUSTC_WRAPPER` for `cargo fix`. - [#5983](https://github.com/rust-lang/cargo/pull/5983) -- Ignore empty `RUSTC_WRAPPER`. - [#5985](https://github.com/rust-lang/cargo/pull/5985) - -### Fixed -- Fixed error when creating a package with an edition field in `Cargo.toml`. - [#5908](https://github.com/rust-lang/cargo/pull/5908) -- More consistently use relative paths for path dependencies in a workspace. - [#5935](https://github.com/rust-lang/cargo/pull/5935) -- `cargo fix` now always runs, even if it was run previously. - [#5944](https://github.com/rust-lang/cargo/pull/5944) -- Windows: Attempt to more reliably detect terminal width. msys-based - terminals are forced to 60 characters wide. - [#6010](https://github.com/rust-lang/cargo/pull/6010) -- Allow multiple target flags with `cargo doc --document-private-items`. - [6022](https://github.com/rust-lang/cargo/pull/6022) - -### Nightly only -- Added - [metabuild](https://doc.rust-lang.org/1.30.0/cargo/reference/unstable.html#metabuild). - [#5628](https://github.com/rust-lang/cargo/pull/5628) diff --git a/collector/compile-benchmarks/cargo-0.60.0/CONTRIBUTING.md b/collector/compile-benchmarks/cargo-0.60.0/CONTRIBUTING.md deleted file mode 100644 index 03993b9ff..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/CONTRIBUTING.md +++ /dev/null @@ -1,5 +0,0 @@ -# Contributing to Cargo - -Contributing documentation has moved to the **[Cargo Contributor Guide]**. - -[Cargo Contributor Guide]: https://rust-lang.github.io/cargo/contrib/ diff --git a/collector/compile-benchmarks/cargo-0.60.0/Cargo.lock b/collector/compile-benchmarks/cargo-0.60.0/Cargo.lock deleted file mode 100644 index 8f17ea96b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/Cargo.lock +++ /dev/null @@ -1,1233 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "aho-corasick" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" -dependencies = [ - "memchr", -] - -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] - -[[package]] -name = "anyhow" -version = "1.0.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "159bb86af3a200e19a068f4224eae4c8bb2d0fa054c7e5d1cacd5cef95e684cd" - -[[package]] -name = "arrayvec" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi", - "libc", - "winapi", -] - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitmaps" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" -dependencies = [ - "typenum", -] - -[[package]] -name = "bstr" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" -dependencies = [ - "lazy_static", - "memchr", - "regex-automata", -] - -[[package]] -name = "bytesize" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c58ec36aac5066d5ca17df51b3e70279f5670a72102f5752cb7e7c856adfc70" - -[[package]] -name = "cargo" -version = "0.60.0" -dependencies = [ - "anyhow", - "atty", - "bytesize", - "cargo-platform", - "cargo-util", - "clap", - "crates-io", - "crossbeam-utils", - "curl", - "curl-sys", - "env_logger 0.9.0", - "filetime", - "flate2", - "fwdansi", - "git2", - "git2-curl", - "glob", - "hex 0.4.3", - "home", - "humantime 2.1.0", - "ignore", - "im-rc", - "itertools", - "jobserver", - "lazy_static", - "lazycell", - "libc", - "libgit2-sys", - "log", - "memchr", - "num_cpus", - "opener", - "openssl", - "os_info", - "percent-encoding", - "pretty_env_logger", - "rustc-workspace-hack", - "rustfix", - "semver", - "serde", - "serde_ignored", - "serde_json", - "shell-escape", - "strip-ansi-escapes", - "tar", - "tempfile", - "termcolor", - "toml", - "unicode-width", - "unicode-xid", - "url", - "walkdir", - "winapi", -] - -[[package]] -name = "cargo-platform" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27" -dependencies = [ - "serde", -] - -[[package]] -name = "cargo-util" -version = "0.1.2" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a51c783163bdf4549820b80968d386c94ed45ed23819c93f59cca7ebd97fe0eb" -dependencies = [ - "anyhow", - "core-foundation", - "crypto-hash", - "filetime", - "hex 0.4.3", - "jobserver", - "libc", - "log", - "miow", - "same-file", - "shell-escape", - "tempfile", - "walkdir", - "winapi", -] - -[[package]] -name = "cc" -version = "1.0.73" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" -dependencies = [ - "jobserver", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "clap" -version = "2.34.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" -dependencies = [ - "ansi_term", - "atty", - "bitflags", - "strsim", - "textwrap", - "unicode-width", - "vec_map", -] - -[[package]] -name = "commoncrypto" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d056a8586ba25a1e4d61cb090900e495952c7886786fc55f909ab2f819b69007" -dependencies = [ - "commoncrypto-sys", -] - -[[package]] -name = "commoncrypto-sys" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fed34f46747aa73dfaa578069fd8279d2818ade2b55f38f22a9401c7f4083e2" -dependencies = [ - "libc", -] - -[[package]] -name = "core-foundation" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" - -[[package]] -name = "crates-io" -version = "0.33.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2d7714dc2b336c5a579a1a2aa2d41c7cd7a31ccb25e2ea908dba8934cfeb75a" -dependencies = [ - "anyhow", - "curl", - "percent-encoding", - "serde", - "serde_json", - "url", -] - -[[package]] -name = "crc32fast" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e5bed1f1c269533fa816a0a5492b3545209a205ca1a54842be180eb63a16a6" -dependencies = [ - "cfg-if", - "lazy_static", -] - -[[package]] -name = "crypto-hash" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a77162240fd97248d19a564a565eb563a3f592b386e4136fb300909e67dddca" -dependencies = [ - "commoncrypto", - "hex 0.3.2", - "openssl", - "winapi", -] - -[[package]] -name = "curl" -version = "0.4.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7de97b894edd5b5bcceef8b78d7da9b75b1d2f2f9a910569d0bde3dd31d84939" -dependencies = [ - "curl-sys", - "libc", - "openssl-probe", - "openssl-sys", - "schannel", - "socket2", - "winapi", -] - -[[package]] -name = "curl-sys" -version = "0.4.52+curl-7.81.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "14b8c2d1023ea5fded5b7b892e4b8e95f70038a421126a056761a84246a28971" -dependencies = [ - "cc", - "libc", - "libnghttp2-sys", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", - "winapi", -] - -[[package]] -name = "either" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" - -[[package]] -name = "env_logger" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" -dependencies = [ - "atty", - "humantime 1.3.0", - "log", - "regex", - "termcolor", -] - -[[package]] -name = "env_logger" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3" -dependencies = [ - "atty", - "humantime 2.1.0", - "log", - "regex", - "termcolor", -] - -[[package]] -name = "fastrand" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" -dependencies = [ - "instant", -] - -[[package]] -name = "filetime" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "winapi", -] - -[[package]] -name = "flate2" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" -dependencies = [ - "cfg-if", - "crc32fast", - "libc", - "libz-sys", - "miniz_oxide", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - -[[package]] -name = "form_urlencoded" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" -dependencies = [ - "matches", - "percent-encoding", -] - -[[package]] -name = "fwdansi" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c1f5787fe85505d1f7777268db5103d80a7a374d2316a7ce262e57baf8f208" -dependencies = [ - "memchr", - "termcolor", -] - -[[package]] -name = "git2" -version = "0.13.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29229cc1b24c0e6062f6e742aa3e256492a5323365e5ed3413599f8a5eff7d6" -dependencies = [ - "bitflags", - "libc", - "libgit2-sys", - "log", - "openssl-probe", - "openssl-sys", - "url", -] - -[[package]] -name = "git2-curl" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "883539cb0ea94bab3f8371a98cd8e937bbe9ee7c044499184aa4c17deb643a50" -dependencies = [ - "curl", - "git2", - "log", - "url", -] 
- -[[package]] -name = "glob" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" - -[[package]] -name = "globset" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10463d9ff00a2a068db14231982f5132edebad0d7660cd956a1c30292dbcbfbd" -dependencies = [ - "aho-corasick", - "bstr", - "fnv", - "log", - "regex", -] - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "hex" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "home" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654" -dependencies = [ - "winapi", -] - -[[package]] -name = "humantime" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" -dependencies = [ - "quick-error", -] - -[[package]] -name = "humantime" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" - -[[package]] -name = "idna" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" -dependencies = [ - "matches", - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "ignore" -version = "0.4.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713f1b139373f96a2e0ce3ac931cd01ee973c3c5dd7c40c0c2efe96ad2b6751d" -dependencies = [ - "crossbeam-utils", - "globset", - "lazy_static", - "log", - "memchr", - "regex", - "same-file", - "thread_local", - "walkdir", - "winapi-util", -] - -[[package]] -name = "im-rc" -version = "15.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f" -dependencies = [ - "bitmaps", - "rand_core", - "rand_xoshiro", - "sized-chunks", - "typenum", - "version_check", -] - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "itertools" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" - -[[package]] -name = "jobserver" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa" -dependencies = [ - "libc", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - -[[package]] -name = "libc" -version = "0.2.119" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf2e165bb3457c8e098ea76f3e3bc9db55f87aa90d52d0e6be741470916aaa4" - -[[package]] -name = "libgit2-sys" -version = "0.12.26+1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e1c899248e606fbfe68dcb31d8b0176ebab833b103824af31bddf4b7457494" -dependencies = [ - "cc", - "libc", - "libssh2-sys", - "libz-sys", - "openssl-sys", - "pkg-config", -] - -[[package]] -name = "libnghttp2-sys" -version = "0.1.7+1.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57ed28aba195b38d5ff02b9170cbff627e336a20925e43b4945390401c5dc93f" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "libssh2-sys" -version = "0.2.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b094a36eb4b8b8c8a7b4b8ae43b2944502be3e59cd87687595cf6b0a71b3f4ca" -dependencies = [ - "cc", - "libc", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "libz-sys" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de5435b8549c16d423ed0c03dbaafe57cf6c3344744f1242520d59c9d8ecec66" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "log" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "matches" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" - -[[package]] -name = "memchr" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" - -[[package]] -name = "miniz_oxide" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" -dependencies = [ - "adler", - "autocfg", -] - -[[package]] -name = "miow" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" -dependencies = [ - "winapi", -] - -[[package]] -name = "num_cpus" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "once_cell" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" - -[[package]] -name = "opener" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea3ebcd72a54701f56345f16785a6d3ac2df7e986d273eb4395c0b01db17952" 
-dependencies = [ - "bstr", - "winapi", -] - -[[package]] -name = "openssl" -version = "0.10.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7ae222234c30df141154f159066c5093ff73b63204dcda7121eb082fc56a95" -dependencies = [ - "bitflags", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-sys", -] - -[[package]] -name = "openssl-probe" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" - -[[package]] -name = "openssl-src" -version = "111.17.0+1.1.1m" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d6a336abd10814198f66e2a91ccd7336611f30334119ca8ce300536666fcf4" -dependencies = [ - "cc", -] - -[[package]] -name = "openssl-sys" -version = "0.9.72" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e46109c383602735fa0a2e48dd2b7c892b048e1bf69e5c3b1d804b7d9c203cb" -dependencies = [ - "autocfg", - "cc", - "libc", - "openssl-src", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "os_info" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "023df84d545ef479cf67fd2f4459a613585c9db4852c2fad12ab70587859d340" -dependencies = [ - "log", - "serde", - "winapi", -] - -[[package]] -name = "percent-encoding" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" - -[[package]] -name = "pkg-config" -version = "0.3.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58893f751c9b0412871a09abd62ecd2a00298c6c83befa223ef98c52aef40cbe" - -[[package]] -name = "pretty_env_logger" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "926d36b9553851b8b0005f1275891b392ee4d2d833852c417ed025477350fb9d" -dependencies = [ - "env_logger 0.7.1", - "log", -] - -[[package]] -name = "proc-macro2" -version = "1.0.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" -dependencies = [ - "unicode-xid", -] - -[[package]] -name = "quick-error" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" - -[[package]] -name = "quote" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" - -[[package]] -name = "rand_xoshiro" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004" -dependencies = [ - "rand_core", -] - -[[package]] -name = "redox_syscall" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" -dependencies = [ - "bitflags", -] - -[[package]] -name = "regex" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" 
-dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" - -[[package]] -name = "regex-syntax" -version = "0.6.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" - -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] - -[[package]] -name = "rustc-workspace-hack" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc71d2faa173b74b232dedc235e3ee1696581bb132fc116fa3626d6151a1a8fb" - -[[package]] -name = "rustfix" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0be05fc0675ef4f47119dc39cfc46636bb77d4fc4ef1bd851b9c3f7697f32a" -dependencies = [ - "anyhow", - "log", - "serde", - "serde_json", -] - -[[package]] -name = "ryu" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "schannel" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75" -dependencies = [ - "lazy_static", - "winapi", -] - -[[package]] -name = "semver" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a3381e03edd24287172047536f20cabde766e2cd3e65e6b00fb3af51c4f38d" -dependencies = [ - "serde", -] - -[[package]] -name = "serde" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.136" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_ignored" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c2c7d39d14f2f2ea82239de71594782f186fd03501ac81f0ce08e674819ff2f" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_json" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "shell-escape" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45bb67a18fa91266cc7807181f62f9178a6873bfad7dc788c42e6430db40184f" - -[[package]] -name = "sized-chunks" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" -dependencies = [ - "bitmaps", - "typenum", -] - -[[package]] -name 
= "socket2" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "strip-ansi-escapes" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "011cbb39cf7c1f62871aea3cc46e5817b0937b49e9447370c93cacbe93a766d8" -dependencies = [ - "vte", -] - -[[package]] -name = "strsim" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" - -[[package]] -name = "syn" -version = "1.0.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "tar" -version = "0.4.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b55807c0344e1e6c04d7c965f5289c39a8d94ae23ed5c0b57aabac549f871c6" -dependencies = [ - "filetime", - "libc", -] - -[[package]] -name = "tempfile" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" -dependencies = [ - "cfg-if", - "fastrand", - "libc", - "redox_syscall", - "remove_dir_all", - "winapi", -] - -[[package]] -name = "termcolor" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "textwrap" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -dependencies = [ - "unicode-width", -] - -[[package]] -name = "thread_local" -version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" -dependencies = [ - "once_cell", -] - -[[package]] -name = "tinyvec" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" - -[[package]] -name = "toml" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" -dependencies = [ - "serde", -] - -[[package]] -name = "typenum" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" - -[[package]] -name = "unicode-bidi" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" - -[[package]] -name = "unicode-normalization" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-width" -version = "0.1.9" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" - -[[package]] -name = "unicode-xid" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" - -[[package]] -name = "url" -version = "2.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" -dependencies = [ - "form_urlencoded", - "idna", - "matches", - "percent-encoding", -] - -[[package]] -name = "utf8parse" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "936e4b492acfd135421d8dca4b1aa80a7bfc26e702ef3af710e0752684df5372" - -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - -[[package]] -name = "vec_map" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "vte" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cbce692ab4ca2f1f3047fcf732430249c0e971bfdd2b234cf2c47ad93af5983" -dependencies = [ - "arrayvec", - "utf8parse", - "vte_generate_state_changes", -] - -[[package]] -name = "vte_generate_state_changes" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "walkdir" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" -dependencies = [ - "same-file", - "winapi", - "winapi-util", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/collector/compile-benchmarks/cargo-0.60.0/Cargo.toml b/collector/compile-benchmarks/cargo-0.60.0/Cargo.toml deleted file mode 100644 index 12ee1eec0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/Cargo.toml +++ /dev/null @@ -1,236 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# 
"normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. - -[package] -edition = "2021" -name = "cargo" -version = "0.60.0" -description = """ -Cargo, a package manager for Rust. -""" -homepage = "https://crates.io" -documentation = "https://docs.rs/cargo" -readme = "README.md" -license = "MIT OR Apache-2.0" -repository = "https://github.com/rust-lang/cargo" - -[lib] -name = "cargo" -path = "src/cargo/lib.rs" - -[[bin]] -name = "cargo" -test = false -doc = false - -[dependencies.anyhow] -version = "1.0" - -[dependencies.atty] -version = "0.2" - -[dependencies.bytesize] -version = "1.0" - -[dependencies.cargo-platform] -version = "0.1.2" - -[dependencies.cargo-util] -version = "0.1.2" - -[dependencies.clap] -version = "2.34.0" - -[dependencies.crates-io] -version = "0.33.1" - -[dependencies.crossbeam-utils] -version = "0.8" - -[dependencies.curl] -version = "0.4.41" -features = ["http2"] - -[dependencies.curl-sys] -version = "0.4.50" - -[dependencies.env_logger] -version = "0.9.0" - -[dependencies.filetime] -version = "0.2.9" - -[dependencies.flate2] -version = "1.0.3" -features = ["zlib"] -default-features = false - -[dependencies.git2] -version = "0.13.23" - -[dependencies.git2-curl] -version = "0.14.1" - -[dependencies.glob] -version = "0.3.0" - -[dependencies.hex] -version = "0.4" - -[dependencies.home] -version = "0.5" - -[dependencies.humantime] -version = "2.0.0" - -[dependencies.ignore] -version = "0.4.7" - -[dependencies.im-rc] -version = "15.0.0" - -[dependencies.itertools] -version = "0.10.0" - -[dependencies.jobserver] -version = "0.1.24" - -[dependencies.lazy_static] -version = "1.2.0" - -[dependencies.lazycell] -version = "1.2.0" - -[dependencies.libc] -version = "0.2" - -[dependencies.libgit2-sys] -version = "0.12.24" - -[dependencies.log] -version = "0.4.6" - -[dependencies.memchr] -version = "2.1.3" - -[dependencies.num_cpus] -version = "1.0" - -[dependencies.opener] -version = "0.5" - -[dependencies.openssl] -version = "0.10.11" -optional = true - -[dependencies.os_info] -version = "3.0.7" - -[dependencies.percent-encoding] -version = "2.0" - -[dependencies.pretty_env_logger] -version = "0.4" -optional = true - -[dependencies.rustc-workspace-hack] -version = "1.0.0" - -[dependencies.rustfix] -version = "0.6.0" - -[dependencies.semver] -version = "1.0.3" -features = ["serde"] - -[dependencies.serde] -version = "1.0.123" -features = ["derive"] - -[dependencies.serde_ignored] -version = "0.1.0" - -[dependencies.serde_json] -version = "1.0.30" -features = ["raw_value"] - -[dependencies.shell-escape] -version = "0.1.4" - -[dependencies.strip-ansi-escapes] -version = "0.1.0" - -[dependencies.tar] -version = "0.4.36" -default-features = false - -[dependencies.tempfile] -version = "3.0" - -[dependencies.termcolor] -version = "1.1" - -[dependencies.toml] -version = "0.5.7" - -[dependencies.unicode-width] -version = "0.1.5" - -[dependencies.unicode-xid] -version = "0.2.0" - -[dependencies.url] -version = "2.2.2" - -[dependencies.walkdir] -version = "2.2" - -[dev-dependencies] - -[build-dependencies.flate2] -version = "1.0.3" -features = ["zlib"] -default-features = false - -[build-dependencies.tar] -version = "0.4.26" -default-features = false - -[features] -deny-warnings = [] 
-pretty-env-logger = ["pretty_env_logger"] -vendored-openssl = ["openssl/vendored"] - -[target."cfg(windows)".dependencies.fwdansi] -version = "1.1.0" - -[target."cfg(windows)".dependencies.winapi] -version = "0.3" -features = [ - "basetsd", - "handleapi", - "jobapi", - "jobapi2", - "memoryapi", - "minwindef", - "ntdef", - "ntstatus", - "processenv", - "processthreadsapi", - "psapi", - "synchapi", - "winerror", - "winbase", - "wincon", - "winnt", -] - -[workspace] diff --git a/collector/compile-benchmarks/cargo-0.60.0/Cargo.toml.orig b/collector/compile-benchmarks/cargo-0.60.0/Cargo.toml.orig deleted file mode 100644 index 20dfd540f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/Cargo.toml.orig +++ /dev/null @@ -1,115 +0,0 @@ -[package] -name = "cargo" -version = "0.60.0" -edition = "2021" -license = "MIT OR Apache-2.0" -homepage = "https://crates.io" -repository = "https://github.com/rust-lang/cargo" -documentation = "https://docs.rs/cargo" -readme = "README.md" -description = """ -Cargo, a package manager for Rust. -""" - -[lib] -name = "cargo" -path = "src/cargo/lib.rs" - -[dependencies] -atty = "0.2" -bytesize = "1.0" -cargo-platform = { path = "crates/cargo-platform", version = "0.1.2" } -cargo-util = { path = "crates/cargo-util", version = "0.1.2" } -crates-io = { path = "crates/crates-io", version = "0.33.1" } -crossbeam-utils = "0.8" -curl = { version = "0.4.41", features = ["http2"] } -curl-sys = "0.4.50" -env_logger = "0.9.0" -pretty_env_logger = { version = "0.4", optional = true } -anyhow = "1.0" -filetime = "0.2.9" -flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } -git2 = "0.13.23" -git2-curl = "0.14.1" -glob = "0.3.0" -hex = "0.4" -home = "0.5" -humantime = "2.0.0" -ignore = "0.4.7" -lazy_static = "1.2.0" -jobserver = "0.1.24" -lazycell = "1.2.0" -libc = "0.2" -log = "0.4.6" -libgit2-sys = "0.12.24" -memchr = "2.1.3" -num_cpus = "1.0" -opener = "0.5" -os_info = "3.0.7" -percent-encoding = "2.0" -rustfix = "0.6.0" -semver = { version = "1.0.3", features = ["serde"] } -serde = { version = "1.0.123", features = ["derive"] } -serde_ignored = "0.1.0" -serde_json = { version = "1.0.30", features = ["raw_value"] } -shell-escape = "0.1.4" -strip-ansi-escapes = "0.1.0" -tar = { version = "0.4.36", default-features = false } -tempfile = "3.0" -termcolor = "1.1" -toml = "0.5.7" -unicode-xid = "0.2.0" -url = "2.2.2" -walkdir = "2.2" -clap = "2.34.0" -unicode-width = "0.1.5" -openssl = { version = '0.10.11', optional = true } -im-rc = "15.0.0" -itertools = "0.10.0" - -# A noop dependency that changes in the Rust repository, it's a bit of a hack. -# See the `src/tools/rustc-workspace-hack/README.md` file in `rust-lang/rust` -# for more information. 
-rustc-workspace-hack = "1.0.0" - -[target.'cfg(windows)'.dependencies] -fwdansi = "1.1.0" - -[target.'cfg(windows)'.dependencies.winapi] -version = "0.3" -features = [ - "basetsd", - "handleapi", - "jobapi", - "jobapi2", - "memoryapi", - "minwindef", - "ntdef", - "ntstatus", - "processenv", - "processthreadsapi", - "psapi", - "synchapi", - "winerror", - "winbase", - "wincon", - "winnt", -] - -[dev-dependencies] -cargo-test-macro = { path = "crates/cargo-test-macro" } -cargo-test-support = { path = "crates/cargo-test-support" } - -[build-dependencies] -flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } -tar = { version = "0.4.26", default-features = false } - -[[bin]] -name = "cargo" -test = false -doc = false - -[features] -deny-warnings = [] -vendored-openssl = ["openssl/vendored"] -pretty-env-logger = ["pretty_env_logger"] diff --git a/collector/compile-benchmarks/cargo-0.60.0/LICENSE-APACHE b/collector/compile-benchmarks/cargo-0.60.0/LICENSE-APACHE deleted file mode 100644 index c98d27d4f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/LICENSE-2.0 - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/collector/compile-benchmarks/cargo-0.60.0/LICENSE-MIT b/collector/compile-benchmarks/cargo-0.60.0/LICENSE-MIT deleted file mode 100644 index 31aa79387..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/LICENSE-MIT +++ /dev/null @@ -1,23 +0,0 @@ -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/collector/compile-benchmarks/cargo-0.60.0/LICENSE-THIRD-PARTY b/collector/compile-benchmarks/cargo-0.60.0/LICENSE-THIRD-PARTY deleted file mode 100644 index 8f83ab502..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/LICENSE-THIRD-PARTY +++ /dev/null @@ -1,1272 +0,0 @@ -The Cargo source code itself does not bundle any third party libraries, but it -depends on a number of libraries which carry their own copyright notices and -license terms. These libraries are normally all linked static into the binary -distributions of Cargo: - -* OpenSSL - https://www.openssl.org/source/license.html - - Copyright (c) 1998-2011 The OpenSSL Project. All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - 1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - - 3. All advertising materials mentioning features or use of this - software must display the following acknowledgment: - "This product includes software developed by the OpenSSL Project - for use in the OpenSSL Toolkit. (https://www.openssl.org/)" - - 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to - endorse or promote products derived from this software without - prior written permission. For written permission, please contact - openssl-core@openssl.org. - - 5. Products derived from this software may not be called "OpenSSL" - nor may "OpenSSL" appear in their names without prior written - permission of the OpenSSL Project. - - 6. Redistributions of any form whatsoever must retain the following - acknowledgment: - "This product includes software developed by the OpenSSL Project - for use in the OpenSSL Toolkit (https://www.openssl.org/)" - - THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY - EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR - PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR - ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED - OF THE POSSIBILITY OF SUCH DAMAGE. - ==================================================================== - - This product includes cryptographic software written by Eric Young - (eay@cryptsoft.com). This product includes software written by Tim - Hudson (tjh@cryptsoft.com). 
- - --- - - Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) - All rights reserved. - - This package is an SSL implementation written - by Eric Young (eay@cryptsoft.com). - The implementation was written so as to conform with Netscapes SSL. - - This library is free for commercial and non-commercial use as long as - the following conditions are aheared to. The following conditions - apply to all code found in this distribution, be it the RC4, RSA, - lhash, DES, etc., code; not just the SSL code. The SSL documentation - included with this distribution is covered by the same copyright terms - except that the holder is Tim Hudson (tjh@cryptsoft.com). - - Copyright remains Eric Young's, and as such any Copyright notices in - the code are not to be removed. - If this package is used in a product, Eric Young should be given attribution - as the author of the parts of the library used. - This can be in the form of a textual message at program startup or - in documentation (online or textual) provided with the package. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - 1. Redistributions of source code must retain the copyright - notice, this list of conditions and the following disclaimer. - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - 3. All advertising materials mentioning features or use of this software - must display the following acknowledgement: - "This product includes cryptographic software written by - Eric Young (eay@cryptsoft.com)" - The word 'cryptographic' can be left out if the rouines from the library - being used are not cryptographic related :-). - 4. If you include any Windows specific code (or a derivative thereof) from - the apps directory (application code) you must include an acknowledgement: - "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" - - THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND - ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE - FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS - OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY - OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF - SUCH DAMAGE. - - The licence and distribution terms for any publically available version or - derivative of this code cannot be changed. i.e. this code cannot simply be - copied and put under another distribution licence - [including the GNU Public Licence.] - -* libgit2 - https://github.com/libgit2/libgit2/blob/master/COPYING - - libgit2 is Copyright (C) the libgit2 contributors, - unless otherwise stated. See the AUTHORS file for details. - - Note that the only valid version of the GPL as far as this project - is concerned is _this_ particular version of the license (ie v2, not - v2.2 or v3.x or whatever), unless explicitly otherwise stated. 
- - ---------------------------------------------------------------------- - - LINKING EXCEPTION - - In addition to the permissions in the GNU General Public License, - the authors give you unlimited permission to link the compiled - version of this library into combinations with other programs, - and to distribute those combinations without any restriction - coming from the use of this file. (The General Public License - restrictions do apply in other respects; for example, they cover - modification of the file, and distribution when not linked into - a combined executable.) - - ---------------------------------------------------------------------- - - GNU GENERAL PUBLIC LICENSE - Version 2, June 1991 - - Copyright (C) 1989, 1991 Free Software Foundation, Inc. - 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The licenses for most software are designed to take away your - freedom to share and change it. By contrast, the GNU General Public - License is intended to guarantee your freedom to share and change free - software--to make sure the software is free for all its users. This - General Public License applies to most of the Free Software - Foundation's software and to any other program whose authors commit to - using it. (Some other Free Software Foundation software is covered by - the GNU Library General Public License instead.) You can apply it to - your programs, too. - - When we speak of free software, we are referring to freedom, not - price. Our General Public Licenses are designed to make sure that you - have the freedom to distribute copies of free software (and charge for - this service if you wish), that you receive source code or can get it - if you want it, that you can change the software or use pieces of it - in new free programs; and that you know you can do these things. - - To protect your rights, we need to make restrictions that forbid - anyone to deny you these rights or to ask you to surrender the rights. - These restrictions translate to certain responsibilities for you if you - distribute copies of the software, or if you modify it. - - For example, if you distribute copies of such a program, whether - gratis or for a fee, you must give the recipients all the rights that - you have. You must make sure that they, too, receive or can get the - source code. And you must show them these terms so they know their - rights. - - We protect your rights with two steps: (1) copyright the software, and - (2) offer you this license which gives you legal permission to copy, - distribute and/or modify the software. - - Also, for each author's protection and ours, we want to make certain - that everyone understands that there is no warranty for this free - software. If the software is modified by someone else and passed on, we - want its recipients to know that what they have is not the original, so - that any problems introduced by others will not reflect on the original - authors' reputations. - - Finally, any free program is threatened constantly by software - patents. We wish to avoid the danger that redistributors of a free - program will individually obtain patent licenses, in effect making the - program proprietary. To prevent this, we have made it clear that any - patent must be licensed for everyone's free use or not licensed at all. - - The precise terms and conditions for copying, distribution and - modification follow. 
- - GNU GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License applies to any program or other work which contains - a notice placed by the copyright holder saying it may be distributed - under the terms of this General Public License. The "Program", below, - refers to any such program or work, and a "work based on the Program" - means either the Program or any derivative work under copyright law: - that is to say, a work containing the Program or a portion of it, - either verbatim or with modifications and/or translated into another - language. (Hereinafter, translation is included without limitation in - the term "modification".) Each licensee is addressed as "you". - - Activities other than copying, distribution and modification are not - covered by this License; they are outside its scope. The act of - running the Program is not restricted, and the output from the Program - is covered only if its contents constitute a work based on the - Program (independent of having been made by running the Program). - Whether that is true depends on what the Program does. - - 1. You may copy and distribute verbatim copies of the Program's - source code as you receive it, in any medium, provided that you - conspicuously and appropriately publish on each copy an appropriate - copyright notice and disclaimer of warranty; keep intact all the - notices that refer to this License and to the absence of any warranty; - and give any other recipients of the Program a copy of this License - along with the Program. - - You may charge a fee for the physical act of transferring a copy, and - you may at your option offer warranty protection in exchange for a fee. - - 2. You may modify your copy or copies of the Program or any portion - of it, thus forming a work based on the Program, and copy and - distribute such modifications or work under the terms of Section 1 - above, provided that you also meet all of these conditions: - - a) You must cause the modified files to carry prominent notices - stating that you changed the files and the date of any change. - - b) You must cause any work that you distribute or publish, that in - whole or in part contains or is derived from the Program or any - part thereof, to be licensed as a whole at no charge to all third - parties under the terms of this License. - - c) If the modified program normally reads commands interactively - when run, you must cause it, when started running for such - interactive use in the most ordinary way, to print or display an - announcement including an appropriate copyright notice and a - notice that there is no warranty (or else, saying that you provide - a warranty) and that users may redistribute the program under - these conditions, and telling the user how to view a copy of this - License. (Exception: if the Program itself is interactive but - does not normally print such an announcement, your work based on - the Program is not required to print an announcement.) - - These requirements apply to the modified work as a whole. If - identifiable sections of that work are not derived from the Program, - and can be reasonably considered independent and separate works in - themselves, then this License, and its terms, do not apply to those - sections when you distribute them as separate works. 
But when you - distribute the same sections as part of a whole which is a work based - on the Program, the distribution of the whole must be on the terms of - this License, whose permissions for other licensees extend to the - entire whole, and thus to each and every part regardless of who wrote it. - - Thus, it is not the intent of this section to claim rights or contest - your rights to work written entirely by you; rather, the intent is to - exercise the right to control the distribution of derivative or - collective works based on the Program. - - In addition, mere aggregation of another work not based on the Program - with the Program (or with a work based on the Program) on a volume of - a storage or distribution medium does not bring the other work under - the scope of this License. - - 3. You may copy and distribute the Program (or a work based on it, - under Section 2) in object code or executable form under the terms of - Sections 1 and 2 above provided that you also do one of the following: - - a) Accompany it with the complete corresponding machine-readable - source code, which must be distributed under the terms of Sections - 1 and 2 above on a medium customarily used for software interchange; or, - - b) Accompany it with a written offer, valid for at least three - years, to give any third party, for a charge no more than your - cost of physically performing source distribution, a complete - machine-readable copy of the corresponding source code, to be - distributed under the terms of Sections 1 and 2 above on a medium - customarily used for software interchange; or, - - c) Accompany it with the information you received as to the offer - to distribute corresponding source code. (This alternative is - allowed only for noncommercial distribution and only if you - received the program in object code or executable form with such - an offer, in accord with Subsection b above.) - - The source code for a work means the preferred form of the work for - making modifications to it. For an executable work, complete source - code means all the source code for all modules it contains, plus any - associated interface definition files, plus the scripts used to - control compilation and installation of the executable. However, as a - special exception, the source code distributed need not include - anything that is normally distributed (in either source or binary - form) with the major components (compiler, kernel, and so on) of the - operating system on which the executable runs, unless that component - itself accompanies the executable. - - If distribution of executable or object code is made by offering - access to copy from a designated place, then offering equivalent - access to copy the source code from the same place counts as - distribution of the source code, even though third parties are not - compelled to copy the source along with the object code. - - 4. You may not copy, modify, sublicense, or distribute the Program - except as expressly provided under this License. Any attempt - otherwise to copy, modify, sublicense or distribute the Program is - void, and will automatically terminate your rights under this License. - However, parties who have received copies, or rights, from you under - this License will not have their licenses terminated so long as such - parties remain in full compliance. - - 5. You are not required to accept this License, since you have not - signed it. However, nothing else grants you permission to modify or - distribute the Program or its derivative works. 
These actions are - prohibited by law if you do not accept this License. Therefore, by - modifying or distributing the Program (or any work based on the - Program), you indicate your acceptance of this License to do so, and - all its terms and conditions for copying, distributing or modifying - the Program or works based on it. - - 6. Each time you redistribute the Program (or any work based on the - Program), the recipient automatically receives a license from the - original licensor to copy, distribute or modify the Program subject to - these terms and conditions. You may not impose any further - restrictions on the recipients' exercise of the rights granted herein. - You are not responsible for enforcing compliance by third parties to - this License. - - 7. If, as a consequence of a court judgment or allegation of patent - infringement or for any other reason (not limited to patent issues), - conditions are imposed on you (whether by court order, agreement or - otherwise) that contradict the conditions of this License, they do not - excuse you from the conditions of this License. If you cannot - distribute so as to satisfy simultaneously your obligations under this - License and any other pertinent obligations, then as a consequence you - may not distribute the Program at all. For example, if a patent - license would not permit royalty-free redistribution of the Program by - all those who receive copies directly or indirectly through you, then - the only way you could satisfy both it and this License would be to - refrain entirely from distribution of the Program. - - If any portion of this section is held invalid or unenforceable under - any particular circumstance, the balance of the section is intended to - apply and the section as a whole is intended to apply in other - circumstances. - - It is not the purpose of this section to induce you to infringe any - patents or other property right claims or to contest validity of any - such claims; this section has the sole purpose of protecting the - integrity of the free software distribution system, which is - implemented by public license practices. Many people have made - generous contributions to the wide range of software distributed - through that system in reliance on consistent application of that - system; it is up to the author/donor to decide if he or she is willing - to distribute software through any other system and a licensee cannot - impose that choice. - - This section is intended to make thoroughly clear what is believed to - be a consequence of the rest of this License. - - 8. If the distribution and/or use of the Program is restricted in - certain countries either by patents or by copyrighted interfaces, the - original copyright holder who places the Program under this License - may add an explicit geographical distribution limitation excluding - those countries, so that distribution is permitted only in or among - countries not thus excluded. In such case, this License incorporates - the limitation as if written in the body of this License. - - 9. The Free Software Foundation may publish revised and/or new versions - of the General Public License from time to time. Such new versions will - be similar in spirit to the present version, but may differ in detail to - address new problems or concerns. - - Each version is given a distinguishing version number. 
If the Program - specifies a version number of this License which applies to it and "any - later version", you have the option of following the terms and conditions - either of that version or of any later version published by the Free - Software Foundation. If the Program does not specify a version number of - this License, you may choose any version ever published by the Free Software - Foundation. - - 10. If you wish to incorporate parts of the Program into other free - programs whose distribution conditions are different, write to the author - to ask for permission. For software which is copyrighted by the Free - Software Foundation, write to the Free Software Foundation; we sometimes - make exceptions for this. Our decision will be guided by the two goals - of preserving the free status of all derivatives of our free software and - of promoting the sharing and reuse of software generally. - - NO WARRANTY - - 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY - FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN - OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES - PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED - OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF - MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS - TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE - PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, - REPAIR OR CORRECTION. - - 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING - WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR - REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, - INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING - OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED - TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY - YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER - PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE - POSSIBILITY OF SUCH DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest - possible use to the public, the best way to achieve this is to make it - free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest - to attach them to the start of each source file to most effectively - convey the exclusion of warranty; and each file should have at least - the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 2 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. 
- - You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - - Also add information on how to contact you by electronic and paper mail. - - If the program is interactive, make it output a short notice like this - when it starts in an interactive mode: - - Gnomovision version 69, Copyright (C) year name of author - Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - - The hypothetical commands `show w' and `show c' should show the appropriate - parts of the General Public License. Of course, the commands you use may - be called something other than `show w' and `show c'; they could even be - mouse-clicks or menu items--whatever suits your program. - - You should also get your employer (if you work as a programmer) or your - school, if any, to sign a "copyright disclaimer" for the program, if - necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the program - `Gnomovision' (which makes passes at compilers) written by James Hacker. - - , 1 April 1989 - Ty Coon, President of Vice - - This General Public License does not permit incorporating your program into - proprietary programs. If your program is a subroutine library, you may - consider it more useful to permit linking proprietary applications with the - library. If this is what you want to do, use the GNU Library General - Public License instead of this License. - - ---------------------------------------------------------------------- - - The bundled ZLib code is licensed under the ZLib license: - - Copyright (C) 1995-2010 Jean-loup Gailly and Mark Adler - - This software is provided 'as-is', without any express or implied - warranty. In no event will the authors be held liable for any damages - arising from the use of this software. - - Permission is granted to anyone to use this software for any purpose, - including commercial applications, and to alter it and redistribute it - freely, subject to the following restrictions: - - 1. The origin of this software must not be misrepresented; you must not - claim that you wrote the original software. If you use this software - in a product, an acknowledgment in the product documentation would be - appreciated but is not required. - 2. Altered source versions must be plainly marked as such, and must not be - misrepresented as being the original software. - 3. This notice may not be removed or altered from any source distribution. - - Jean-loup Gailly Mark Adler - jloup@gzip.org madler@alumni.caltech.edu - - ---------------------------------------------------------------------- - - The Clar framework is licensed under the MIT license: - - Copyright (C) 2011 by Vicent Marti - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. 
- - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - THE SOFTWARE. - - ---------------------------------------------------------------------- - - The regex library (deps/regex/) is licensed under the GNU LGPL - - GNU LESSER GENERAL PUBLIC LICENSE - Version 2.1, February 1999 - - Copyright (C) 1991, 1999 Free Software Foundation, Inc. - 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - [This is the first released version of the Lesser GPL. It also counts - as the successor of the GNU Library Public License, version 2, hence - the version number 2.1.] - - Preamble - - The licenses for most software are designed to take away your - freedom to share and change it. By contrast, the GNU General Public - Licenses are intended to guarantee your freedom to share and change - free software--to make sure the software is free for all its users. - - This license, the Lesser General Public License, applies to some - specially designated software packages--typically libraries--of the - Free Software Foundation and other authors who decide to use it. You - can use it too, but we suggest you first think carefully about whether - this license or the ordinary General Public License is the better - strategy to use in any particular case, based on the explanations below. - - When we speak of free software, we are referring to freedom of use, - not price. Our General Public Licenses are designed to make sure that - you have the freedom to distribute copies of free software (and charge - for this service if you wish); that you receive source code or can get - it if you want it; that you can change the software and use pieces of - it in new free programs; and that you are informed that you can do - these things. - - To protect your rights, we need to make restrictions that forbid - distributors to deny you these rights or to ask you to surrender these - rights. These restrictions translate to certain responsibilities for - you if you distribute copies of the library or if you modify it. - - For example, if you distribute copies of the library, whether gratis - or for a fee, you must give the recipients all the rights that we gave - you. You must make sure that they, too, receive or can get the source - code. If you link other code with the library, you must provide - complete object files to the recipients, so that they can relink them - with the library after making changes to the library and recompiling - it. And you must show them these terms so they know their rights. - - We protect your rights with a two-step method: (1) we copyright the - library, and (2) we offer you this license, which gives you legal - permission to copy, distribute and/or modify the library. - - To protect each distributor, we want to make it very clear that - there is no warranty for the free library. 
Also, if the library is - modified by someone else and passed on, the recipients should know - that what they have is not the original version, so that the original - author's reputation will not be affected by problems that might be - introduced by others. - - Finally, software patents pose a constant threat to the existence of - any free program. We wish to make sure that a company cannot - effectively restrict the users of a free program by obtaining a - restrictive license from a patent holder. Therefore, we insist that - any patent license obtained for a version of the library must be - consistent with the full freedom of use specified in this license. - - Most GNU software, including some libraries, is covered by the - ordinary GNU General Public License. This license, the GNU Lesser - General Public License, applies to certain designated libraries, and - is quite different from the ordinary General Public License. We use - this license for certain libraries in order to permit linking those - libraries into non-free programs. - - When a program is linked with a library, whether statically or using - a shared library, the combination of the two is legally speaking a - combined work, a derivative of the original library. The ordinary - General Public License therefore permits such linking only if the - entire combination fits its criteria of freedom. The Lesser General - Public License permits more lax criteria for linking other code with - the library. - - We call this license the "Lesser" General Public License because it - does Less to protect the user's freedom than the ordinary General - Public License. It also provides other free software developers Less - of an advantage over competing non-free programs. These disadvantages - are the reason we use the ordinary General Public License for many - libraries. However, the Lesser license provides advantages in certain - special circumstances. - - For example, on rare occasions, there may be a special need to - encourage the widest possible use of a certain library, so that it becomes - a de-facto standard. To achieve this, non-free programs must be - allowed to use the library. A more frequent case is that a free - library does the same job as widely used non-free libraries. In this - case, there is little to gain by limiting the free library to free - software only, so we use the Lesser General Public License. - - In other cases, permission to use a particular library in non-free - programs enables a greater number of people to use a large body of - free software. For example, permission to use the GNU C Library in - non-free programs enables many more people to use the whole GNU - operating system, as well as its variant, the GNU/Linux operating - system. - - Although the Lesser General Public License is Less protective of the - users' freedom, it does ensure that the user of a program that is - linked with the Library has the freedom and the wherewithal to run - that program using a modified version of the Library. - - The precise terms and conditions for copying, distribution and - modification follow. Pay close attention to the difference between a - "work based on the library" and a "work that uses the library". The - former contains code derived from the library, whereas the latter must - be combined with the library in order to run. - - GNU LESSER GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. 
This License Agreement applies to any software library or other - program which contains a notice placed by the copyright holder or - other authorized party saying it may be distributed under the terms of - this Lesser General Public License (also called "this License"). - Each licensee is addressed as "you". - - A "library" means a collection of software functions and/or data - prepared so as to be conveniently linked with application programs - (which use some of those functions and data) to form executables. - - The "Library", below, refers to any such software library or work - which has been distributed under these terms. A "work based on the - Library" means either the Library or any derivative work under - copyright law: that is to say, a work containing the Library or a - portion of it, either verbatim or with modifications and/or translated - straightforwardly into another language. (Hereinafter, translation is - included without limitation in the term "modification".) - - "Source code" for a work means the preferred form of the work for - making modifications to it. For a library, complete source code means - all the source code for all modules it contains, plus any associated - interface definition files, plus the scripts used to control compilation - and installation of the library. - - Activities other than copying, distribution and modification are not - covered by this License; they are outside its scope. The act of - running a program using the Library is not restricted, and output from - such a program is covered only if its contents constitute a work based - on the Library (independent of the use of the Library in a tool for - writing it). Whether that is true depends on what the Library does - and what the program that uses the Library does. - - 1. You may copy and distribute verbatim copies of the Library's - complete source code as you receive it, in any medium, provided that - you conspicuously and appropriately publish on each copy an - appropriate copyright notice and disclaimer of warranty; keep intact - all the notices that refer to this License and to the absence of any - warranty; and distribute a copy of this License along with the - Library. - - You may charge a fee for the physical act of transferring a copy, - and you may at your option offer warranty protection in exchange for a - fee. - - 2. You may modify your copy or copies of the Library or any portion - of it, thus forming a work based on the Library, and copy and - distribute such modifications or work under the terms of Section 1 - above, provided that you also meet all of these conditions: - - a) The modified work must itself be a software library. - - b) You must cause the files modified to carry prominent notices - stating that you changed the files and the date of any change. - - c) You must cause the whole of the work to be licensed at no - charge to all third parties under the terms of this License. - - d) If a facility in the modified Library refers to a function or a - table of data to be supplied by an application program that uses - the facility, other than as an argument passed when the facility - is invoked, then you must make a good faith effort to ensure that, - in the event an application does not supply such function or - table, the facility still operates, and performs whatever part of - its purpose remains meaningful. - - (For example, a function in a library to compute square roots has - a purpose that is entirely well-defined independent of the - application. 
Therefore, Subsection 2d requires that any - application-supplied function or table used by this function must - be optional: if the application does not supply it, the square - root function must still compute square roots.) - - These requirements apply to the modified work as a whole. If - identifiable sections of that work are not derived from the Library, - and can be reasonably considered independent and separate works in - themselves, then this License, and its terms, do not apply to those - sections when you distribute them as separate works. But when you - distribute the same sections as part of a whole which is a work based - on the Library, the distribution of the whole must be on the terms of - this License, whose permissions for other licensees extend to the - entire whole, and thus to each and every part regardless of who wrote - it. - - Thus, it is not the intent of this section to claim rights or contest - your rights to work written entirely by you; rather, the intent is to - exercise the right to control the distribution of derivative or - collective works based on the Library. - - In addition, mere aggregation of another work not based on the Library - with the Library (or with a work based on the Library) on a volume of - a storage or distribution medium does not bring the other work under - the scope of this License. - - 3. You may opt to apply the terms of the ordinary GNU General Public - License instead of this License to a given copy of the Library. To do - this, you must alter all the notices that refer to this License, so - that they refer to the ordinary GNU General Public License, version 2, - instead of to this License. (If a newer version than version 2 of the - ordinary GNU General Public License has appeared, then you can specify - that version instead if you wish.) Do not make any other change in - these notices. - - Once this change is made in a given copy, it is irreversible for - that copy, so the ordinary GNU General Public License applies to all - subsequent copies and derivative works made from that copy. - - This option is useful when you wish to copy part of the code of - the Library into a program that is not a library. - - 4. You may copy and distribute the Library (or a portion or - derivative of it, under Section 2) in object code or executable form - under the terms of Sections 1 and 2 above provided that you accompany - it with the complete corresponding machine-readable source code, which - must be distributed under the terms of Sections 1 and 2 above on a - medium customarily used for software interchange. - - If distribution of object code is made by offering access to copy - from a designated place, then offering equivalent access to copy the - source code from the same place satisfies the requirement to - distribute the source code, even though third parties are not - compelled to copy the source along with the object code. - - 5. A program that contains no derivative of any portion of the - Library, but is designed to work with the Library by being compiled or - linked with it, is called a "work that uses the Library". Such a - work, in isolation, is not a derivative work of the Library, and - therefore falls outside the scope of this License. - - However, linking a "work that uses the Library" with the Library - creates an executable that is a derivative of the Library (because it - contains portions of the Library), rather than a "work that uses the - library". The executable is therefore covered by this License. 
- Section 6 states terms for distribution of such executables. - - When a "work that uses the Library" uses material from a header file - that is part of the Library, the object code for the work may be a - derivative work of the Library even though the source code is not. - Whether this is true is especially significant if the work can be - linked without the Library, or if the work is itself a library. The - threshold for this to be true is not precisely defined by law. - - If such an object file uses only numerical parameters, data - structure layouts and accessors, and small macros and small inline - functions (ten lines or less in length), then the use of the object - file is unrestricted, regardless of whether it is legally a derivative - work. (Executables containing this object code plus portions of the - Library will still fall under Section 6.) - - Otherwise, if the work is a derivative of the Library, you may - distribute the object code for the work under the terms of Section 6. - Any executables containing that work also fall under Section 6, - whether or not they are linked directly with the Library itself. - - 6. As an exception to the Sections above, you may also combine or - link a "work that uses the Library" with the Library to produce a - work containing portions of the Library, and distribute that work - under terms of your choice, provided that the terms permit - modification of the work for the customer's own use and reverse - engineering for debugging such modifications. - - You must give prominent notice with each copy of the work that the - Library is used in it and that the Library and its use are covered by - this License. You must supply a copy of this License. If the work - during execution displays copyright notices, you must include the - copyright notice for the Library among them, as well as a reference - directing the user to the copy of this License. Also, you must do one - of these things: - - a) Accompany the work with the complete corresponding - machine-readable source code for the Library including whatever - changes were used in the work (which must be distributed under - Sections 1 and 2 above); and, if the work is an executable linked - with the Library, with the complete machine-readable "work that - uses the Library", as object code and/or source code, so that the - user can modify the Library and then relink to produce a modified - executable containing the modified Library. (It is understood - that the user who changes the contents of definitions files in the - Library will not necessarily be able to recompile the application - to use the modified definitions.) - - b) Use a suitable shared library mechanism for linking with the - Library. A suitable mechanism is one that (1) uses at run time a - copy of the library already present on the user's computer system, - rather than copying library functions into the executable, and (2) - will operate properly with a modified version of the library, if - the user installs one, as long as the modified version is - interface-compatible with the version that the work was made with. - - c) Accompany the work with a written offer, valid for at - least three years, to give the same user the materials - specified in Subsection 6a, above, for a charge no more - than the cost of performing this distribution. - - d) If distribution of the work is made by offering access to copy - from a designated place, offer equivalent access to copy the above - specified materials from the same place. 
- - e) Verify that the user has already received a copy of these - materials or that you have already sent this user a copy. - - For an executable, the required form of the "work that uses the - Library" must include any data and utility programs needed for - reproducing the executable from it. However, as a special exception, - the materials to be distributed need not include anything that is - normally distributed (in either source or binary form) with the major - components (compiler, kernel, and so on) of the operating system on - which the executable runs, unless that component itself accompanies - the executable. - - It may happen that this requirement contradicts the license - restrictions of other proprietary libraries that do not normally - accompany the operating system. Such a contradiction means you cannot - use both them and the Library together in an executable that you - distribute. - - 7. You may place library facilities that are a work based on the - Library side-by-side in a single library together with other library - facilities not covered by this License, and distribute such a combined - library, provided that the separate distribution of the work based on - the Library and of the other library facilities is otherwise - permitted, and provided that you do these two things: - - a) Accompany the combined library with a copy of the same work - based on the Library, uncombined with any other library - facilities. This must be distributed under the terms of the - Sections above. - - b) Give prominent notice with the combined library of the fact - that part of it is a work based on the Library, and explaining - where to find the accompanying uncombined form of the same work. - - 8. You may not copy, modify, sublicense, link with, or distribute - the Library except as expressly provided under this License. Any - attempt otherwise to copy, modify, sublicense, link with, or - distribute the Library is void, and will automatically terminate your - rights under this License. However, parties who have received copies, - or rights, from you under this License will not have their licenses - terminated so long as such parties remain in full compliance. - - 9. You are not required to accept this License, since you have not - signed it. However, nothing else grants you permission to modify or - distribute the Library or its derivative works. These actions are - prohibited by law if you do not accept this License. Therefore, by - modifying or distributing the Library (or any work based on the - Library), you indicate your acceptance of this License to do so, and - all its terms and conditions for copying, distributing or modifying - the Library or works based on it. - - 10. Each time you redistribute the Library (or any work based on the - Library), the recipient automatically receives a license from the - original licensor to copy, distribute, link with or modify the Library - subject to these terms and conditions. You may not impose any further - restrictions on the recipients' exercise of the rights granted herein. - You are not responsible for enforcing compliance by third parties with - this License. - - 11. If, as a consequence of a court judgment or allegation of patent - infringement or for any other reason (not limited to patent issues), - conditions are imposed on you (whether by court order, agreement or - otherwise) that contradict the conditions of this License, they do not - excuse you from the conditions of this License. 
If you cannot - distribute so as to satisfy simultaneously your obligations under this - License and any other pertinent obligations, then as a consequence you - may not distribute the Library at all. For example, if a patent - license would not permit royalty-free redistribution of the Library by - all those who receive copies directly or indirectly through you, then - the only way you could satisfy both it and this License would be to - refrain entirely from distribution of the Library. - - If any portion of this section is held invalid or unenforceable under any - particular circumstance, the balance of the section is intended to apply, - and the section as a whole is intended to apply in other circumstances. - - It is not the purpose of this section to induce you to infringe any - patents or other property right claims or to contest validity of any - such claims; this section has the sole purpose of protecting the - integrity of the free software distribution system which is - implemented by public license practices. Many people have made - generous contributions to the wide range of software distributed - through that system in reliance on consistent application of that - system; it is up to the author/donor to decide if he or she is willing - to distribute software through any other system and a licensee cannot - impose that choice. - - This section is intended to make thoroughly clear what is believed to - be a consequence of the rest of this License. - - 12. If the distribution and/or use of the Library is restricted in - certain countries either by patents or by copyrighted interfaces, the - original copyright holder who places the Library under this License may add - an explicit geographical distribution limitation excluding those countries, - so that distribution is permitted only in or among countries not thus - excluded. In such case, this License incorporates the limitation as if - written in the body of this License. - - 13. The Free Software Foundation may publish revised and/or new - versions of the Lesser General Public License from time to time. - Such new versions will be similar in spirit to the present version, - but may differ in detail to address new problems or concerns. - - Each version is given a distinguishing version number. If the Library - specifies a version number of this License which applies to it and - "any later version", you have the option of following the terms and - conditions either of that version or of any later version published by - the Free Software Foundation. If the Library does not specify a - license version number, you may choose any version ever published by - the Free Software Foundation. - - 14. If you wish to incorporate parts of the Library into other free - programs whose distribution conditions are incompatible with these, - write to the author to ask for permission. For software which is - copyrighted by the Free Software Foundation, write to the Free - Software Foundation; we sometimes make exceptions for this. Our - decision will be guided by the two goals of preserving the free status - of all derivatives of our free software and of promoting the sharing - and reuse of software generally. - - NO WARRANTY - - 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO - WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
- EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR - OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY - KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR - PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE - LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME - THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN - WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY - AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU - FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR - CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE - LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING - RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A - FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF - SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH - DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Libraries - - If you develop a new library, and you want it to be of the greatest - possible use to the public, we recommend making it free software that - everyone can redistribute and change. You can do so by permitting - redistribution under these terms (or, alternatively, under the terms of the - ordinary General Public License). - - To apply these terms, attach the following notices to the library. It is - safest to attach them to the start of each source file to most effectively - convey the exclusion of warranty; and each file should have at least the - "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA - - Also add information on how to contact you by electronic and paper mail. - - You should also get your employer (if you work as a programmer) or your - school, if any, to sign a "copyright disclaimer" for the library, if - necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the - library `Frob' (a library for tweaking knobs) written by James Random Hacker. - - , 1 April 1990 - Ty Coon, President of Vice - - That's all there is to it! - - ---------------------------------------------------------------------- - -* libssh2 - https://www.libssh2.org/license.html - - Copyright (c) 2004-2007 Sara Golemon - Copyright (c) 2005,2006 Mikhail Gusarov - Copyright (c) 2006-2007 The Written Word, Inc. - Copyright (c) 2007 Eli Fant - Copyright (c) 2009 Daniel Stenberg - Copyright (C) 2008, 2009 Simon Josefsson - All rights reserved. 
- - Redistribution and use in source and binary forms, - with or without modification, are permitted provided - that the following conditions are met: - - Redistributions of source code must retain the above - copyright notice, this list of conditions and the - following disclaimer. - - Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials - provided with the distribution. - - Neither the name of the copyright holder nor the names - of any other contributors may be used to endorse or - promote products derived from this software without - specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND - CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, - INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES - OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR - CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, - WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE - USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY - OF SUCH DAMAGE. - -* libcurl - https://curl.haxx.se/docs/copyright.html - - COPYRIGHT AND PERMISSION NOTICE - - Copyright (c) 1996 - 2014, Daniel Stenberg, daniel@haxx.se. - - All rights reserved. - - Permission to use, copy, modify, and distribute this software for any - purpose with or without fee is hereby granted, provided that the above - copyright notice and this permission notice appear in all copies. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. - IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, - DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR - OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE - USE OR OTHER DEALINGS IN THE SOFTWARE. - - Except as contained in this notice, the name of a copyright holder shall not - be used in advertising or otherwise to promote the sale, use or other - dealings in this Software without prior written authorization of the - copyright holder. 
- -* flate2-rs - https://github.com/alexcrichton/flate2-rs/blob/master/LICENSE-MIT -* link-config - https://github.com/alexcrichton/link-config/blob/master/LICENSE-MIT -* openssl-static-sys - https://github.com/alexcrichton/openssl-static-sys/blob/master/LICENSE-MIT -* toml-rs - https://github.com/alexcrichton/toml-rs/blob/master/LICENSE-MIT -* libssh2-static-sys - https://github.com/alexcrichton/libssh2-static-sys/blob/master/LICENSE-MIT -* git2-rs - https://github.com/alexcrichton/git2-rs/blob/master/LICENSE-MIT -* tar-rs - https://github.com/alexcrichton/tar-rs/blob/master/LICENSE-MIT - - Copyright (c) 2014 Alex Crichton - - Permission is hereby granted, free of charge, to any - person obtaining a copy of this software and associated - documentation files (the "Software"), to deal in the - Software without restriction, including without - limitation the rights to use, copy, modify, merge, - publish, distribute, sublicense, and/or sell copies of - the Software, and to permit persons to whom the Software - is furnished to do so, subject to the following - conditions: - - The above copyright notice and this permission notice - shall be included in all copies or substantial portions - of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF - ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED - TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A - PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT - SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR - IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - DEALINGS IN THE SOFTWARE. - -* glob - https://github.com/rust-lang/glob/blob/master/LICENSE-MIT -* semver - https://github.com/rust-lang/semver/blob/master/LICENSE-MIT - - Copyright (c) 2014 The Rust Project Developers - - Permission is hereby granted, free of charge, to any - person obtaining a copy of this software and associated - documentation files (the "Software"), to deal in the - Software without restriction, including without - limitation the rights to use, copy, modify, merge, - publish, distribute, sublicense, and/or sell copies of - the Software, and to permit persons to whom the Software - is furnished to do so, subject to the following - conditions: - - The above copyright notice and this permission notice - shall be included in all copies or substantial portions - of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF - ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED - TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A - PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT - SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR - IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - DEALINGS IN THE SOFTWARE. 
- -* rust-url - https://github.com/servo/rust-url/blob/master/LICENSE-MIT - - Copyright (c) 2006-2009 Graydon Hoare - Copyright (c) 2009-2013 Mozilla Foundation - - Permission is hereby granted, free of charge, to any - person obtaining a copy of this software and associated - documentation files (the "Software"), to deal in the - Software without restriction, including without - limitation the rights to use, copy, modify, merge, - publish, distribute, sublicense, and/or sell copies of - the Software, and to permit persons to whom the Software - is furnished to do so, subject to the following - conditions: - - The above copyright notice and this permission notice - shall be included in all copies or substantial portions - of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF - ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED - TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A - PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT - SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR - IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - DEALINGS IN THE SOFTWARE. - -* rust-encoding - https://github.com/lifthrasiir/rust-encoding/blob/master/LICENSE.txt - - The MIT License (MIT) - - Copyright (c) 2013, Kang Seonghoon. - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - THE SOFTWARE. - -* curl-rust - https://github.com/carllerche/curl-rust/blob/master/LICENSE - - Copyright (c) 2014 Carl Lerche - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in - all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - THE SOFTWARE. - -* docopt.rs - https://github.com/docopt/docopt.rs/blob/master/UNLICENSE - - This is free and unencumbered software released into the public domain. - - Anyone is free to copy, modify, publish, use, compile, sell, or - distribute this software, either in source code form or as a compiled - binary, for any purpose, commercial or non-commercial, and by any - means. - - In jurisdictions that recognize copyright laws, the author or authors - of this software dedicate any and all copyright interest in the - software to the public domain. We make this dedication for the benefit - of the public at large and to the detriment of our heirs and - successors. We intend this dedication to be an overt act of - relinquishment in perpetuity of all present and future rights to this - software under copyright law. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR - OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR - OTHER DEALINGS IN THE SOFTWARE. - - For more information, please refer to - diff --git a/collector/compile-benchmarks/cargo-0.60.0/README.md b/collector/compile-benchmarks/cargo-0.60.0/README.md deleted file mode 100644 index 6eed7578e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/README.md +++ /dev/null @@ -1,93 +0,0 @@ -# Cargo - -Cargo downloads your Rust projectโ€™s dependencies and compiles your project. - -Learn more at https://doc.rust-lang.org/cargo/ - -## Code Status - -[![Build Status](https://dev.azure.com/rust-lang/cargo/_apis/build/status/rust-lang.cargo?branchName=auto-cargo)](https://dev.azure.com/rust-lang/cargo/_build?definitionId=18) - -Code documentation: https://docs.rs/cargo/ - -## Installing Cargo - -Cargo is distributed by default with Rust, so if you've got `rustc` installed -locally you probably also have `cargo` installed locally. - -## Compiling from Source - -Cargo requires the following tools and packages to build: - -* `git` -* `curl` (on Unix) -* `pkg-config` (on Unix, used to figure out the `libssl` headers/libraries) -* OpenSSL headers (only for Unix, this is the `libssl-dev` package on ubuntu) -* `cargo` and `rustc` - -First, you'll want to check out this repository - -``` -git clone https://github.com/rust-lang/cargo -cd cargo -``` - -With `cargo` already installed, you can simply run: - -``` -cargo build --release -``` - -## Adding new subcommands to Cargo - -Cargo is designed to be extensible with new subcommands without having to modify -Cargo itself. See [the Wiki page][third-party-subcommands] for more details and -a list of known community-developed subcommands. - -[third-party-subcommands]: https://github.com/rust-lang/cargo/wiki/Third-party-cargo-subcommands - - -## Releases - -Cargo releases coincide with Rust releases. -High level release notes are available as part of [Rust's release notes][rel]. -Detailed release notes are available in this repo at [CHANGELOG.md]. 
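The "Adding new subcommands to Cargo" section of the deleted README above notes that Cargo can be extended with external subcommands without modifying Cargo itself. As a hedged illustration of that mechanism (the binary name `cargo-hello` is hypothetical and not part of this diff): Cargo looks up an executable named `cargo-<name>` on `$PATH` and invokes it with the subcommand name as the first argument followed by the remaining arguments, which matches how `execute_subcommand` in the deleted `src/bin/cargo/cli.rs` later in this diff builds `ext_args` as `vec![cmd]` plus the trailing values.

```rust
// Hypothetical `cargo-hello` binary: once it is on $PATH it can be run as `cargo hello`.
// Cargo passes the subcommand name ("hello") as the first argument and forwards any
// trailing user arguments unchanged.
use std::env;

fn main() {
    // argv[0] is the binary path, argv[1] is the subcommand name inserted by Cargo.
    let forwarded: Vec<String> = env::args().skip(2).collect();
    println!("hello from an external Cargo subcommand");
    println!("arguments forwarded by cargo: {:?}", forwarded);
}
```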
- -[rel]: https://github.com/rust-lang/rust/blob/master/RELEASES.md -[CHANGELOG.md]: CHANGELOG.md - -## Reporting issues - -Found a bug? We'd love to know about it! - -Please report all issues on the GitHub [issue tracker][issues]. - -[issues]: https://github.com/rust-lang/cargo/issues - -## Contributing - -See the **[Cargo Contributor Guide]** for a complete introduction -to contributing to Cargo. - -[Cargo Contributor Guide]: https://rust-lang.github.io/cargo/contrib/ - -## License - -Cargo is primarily distributed under the terms of both the MIT license -and the Apache License (Version 2.0). - -See [LICENSE-APACHE](LICENSE-APACHE) and [LICENSE-MIT](LICENSE-MIT) for details. - -### Third party software - -This product includes software developed by the OpenSSL Project -for use in the OpenSSL Toolkit (https://www.openssl.org/). - -In binary form, this product includes software that is licensed under the -terms of the GNU General Public License, version 2, with a linking exception, -which can be obtained from the [upstream repository][1]. - -See [LICENSE-THIRD-PARTY](LICENSE-THIRD-PARTY) for details. - -[1]: https://github.com/libgit2/libgit2 - diff --git a/collector/compile-benchmarks/cargo-0.60.0/benches/README.md b/collector/compile-benchmarks/cargo-0.60.0/benches/README.md deleted file mode 100644 index b4b8b190a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/benches/README.md +++ /dev/null @@ -1,124 +0,0 @@ -# Cargo Benchmarking - -This directory contains some benchmarks for cargo itself. This uses -[Criterion] for running benchmarks. It is recommended to read the Criterion -book to get familiar with how to use it. A basic usage would be: - -```sh -cd benches/benchsuite -cargo bench -``` - -The tests involve downloading the index and benchmarking against some -real-world and artificial workspaces located in the [`workspaces`](workspaces) -directory. - -**Beware** that the initial download can take a fairly long amount of time (10 -minutes minimum on an extremely fast network) and require significant disk -space (around 4.5GB). The benchsuite will cache the index and downloaded -crates in the `target/tmp/bench` directory, so subsequent runs should be -faster. You can (and probably should) specify individual benchmarks to run to -narrow it down to a more reasonable set, for example: - -```sh -cargo bench -- resolve_ws/rust -``` - -This will only download what's necessary for the rust-lang/rust workspace -(which is about 330MB) and run the benchmarks against it (which should take -about a minute). To get a list of all the benchmarks, run: - -```sh -cargo bench -- --list -``` - -## Viewing reports - -The benchmarks display some basic information on the command-line while they -run. A more complete HTML report can be found at -`target/criterion/report/index.html` which contains links to all the -benchmarks and summaries. Check out the Criterion book for more information on -the extensive reporting capabilities. - -## Comparing implementations - -Knowing the raw numbers can be useful, but what you're probably most -interested in is checking if your changes help or hurt performance. To do -that, you need to run the benchmarks multiple times. - -First, run the benchmarks from the master branch of cargo without any changes. -To make it easier to compare, Criterion supports naming the baseline so that -you can iterate on your code and compare against it multiple times. - -```sh -cargo bench -- --save-baseline master -``` - -Now you can switch to your branch with your changes. 
Re-run the benchmarks -compared against the baseline: - -```sh -cargo bench -- --baseline master -``` - -You can repeat the last command as you make changes to re-compare against the -master baseline. - -Without the baseline arguments, it will compare against the last run, which -can be helpful for comparing incremental changes. - -## Capturing workspaces - -The [`workspaces`](workspaces) directory contains several workspaces that -provide a variety of different workspaces intended to provide good exercises -for benchmarks. Some of these are shadow copies of real-world workspaces. This -is done with the tool in the [`capture`](capture) directory. The tool will -copy `Cargo.lock` and all of the `Cargo.toml` files of the workspace members. -It also adds an empty `lib.rs` so Cargo won't error, and sanitizes the -`Cargo.toml` to some degree, removing unwanted elements. Finally, it -compresses everything into a `tgz`. - -To run it, do: - -```sh -cd benches/capture -cargo run -- /path/to/workspace/foo -``` - -The resolver benchmarks also support the `CARGO_BENCH_WORKSPACES` environment -variable, which you can point to a Cargo workspace if you want to try -different workspaces. For example: - -```sh -CARGO_BENCH_WORKSPACES=/path/to/some/workspace cargo bench -``` - -## TODO - -This is just a start for establishing a benchmarking suite for Cargo. There's -a lot that can be added. Some ideas: - -* Fix the benchmarks so that the resolver setup doesn't run every iteration. -* Benchmark [this section of - code](https://github.com/rust-lang/cargo/blob/a821e2cb24d7b6013433f069ab3bad53d160e100/src/cargo/ops/cargo_compile.rs#L470-L549) - which builds the unit graph. The performance there isn't great, and it would - be good to keep an eye on it. Unfortunately that would mean doing a bit of - work to make `generate_targets` publicly visible, and there is a bunch of - setup code that may need to be duplicated. -* Benchmark the fingerprinting code. -* Benchmark running the `cargo` executable. Running something like `cargo - build` or `cargo check` with everything "Fresh" would be a good end-to-end - exercise to measure the overall overhead of Cargo. -* Benchmark pathological resolver scenarios. There might be some cases where - the resolver can spend a significant amount of time. It would be good to - identify if these exist, and create benchmarks for them. This may require - creating an artificial index, similar to the `resolver-tests`. This should - also consider scenarios where the resolver ultimately fails. -* Benchmark without `Cargo.lock`. I'm not sure if this is particularly - valuable, since we are mostly concerned with incremental builds which will - always have a lock file. -* Benchmark just - [`resolve::resolve`](https://github.com/rust-lang/cargo/blob/a821e2cb24d7b6013433f069ab3bad53d160e100/src/cargo/core/resolver/mod.rs#L122) - without anything else. This can help focus on just the resolver. 
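The deleted `benches/README.md` above walks through running Criterion benchmarks and filtering them by name (for example `resolve_ws/rust`). For orientation only, here is a minimal Criterion skeleton showing where such `group/function` names come from; the group name, function name, and workload are illustrative and are not taken from the actual benchsuite.

```rust
// Minimal Criterion benchmark sketch (illustrative; not the benchsuite's code).
use criterion::{criterion_group, criterion_main, Criterion};

// Stand-in workload so the example is self-contained; the real suite would do
// something like resolving a captured workspace here.
fn workload() -> u64 {
    (0..10_000u64).sum()
}

fn bench_group(c: &mut Criterion) {
    // Benchmarks registered in a group get IDs of the form `group/function`,
    // which is what filters like `cargo bench -- resolve_ws/rust` match on.
    let mut group = c.benchmark_group("resolve_ws");
    group.bench_function("example", |b| b.iter(workload));
    group.finish();
}

criterion_group!(benches, bench_group);
criterion_main!(benches);
```

The `--save-baseline`/`--baseline` flags described above compare results per benchmark ID, so keeping those IDs stable across branches is what makes the baseline comparisons meaningful.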
- -[Criterion]: https://bheisler.github.io/criterion.rs/book/ diff --git a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/cargo.tgz b/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/cargo.tgz deleted file mode 100644 index 653aff982..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/cargo.tgz and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/diem.tgz b/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/diem.tgz deleted file mode 100644 index e047c6cd0..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/diem.tgz and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/empty.tgz b/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/empty.tgz deleted file mode 100644 index 1a7d555b4..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/empty.tgz and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/gecko-dev.tgz b/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/gecko-dev.tgz deleted file mode 100644 index e89c676b2..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/gecko-dev.tgz and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/rust.tgz b/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/rust.tgz deleted file mode 100644 index 74da4759b..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/rust.tgz and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/servo.tgz b/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/servo.tgz deleted file mode 100644 index 511164369..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/servo.tgz and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/substrate.tgz b/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/substrate.tgz deleted file mode 100644 index 81c3874f6..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/substrate.tgz and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/tikv.tgz b/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/tikv.tgz deleted file mode 100644 index 74add19b3..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/tikv.tgz and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/toml-rs.tgz b/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/toml-rs.tgz deleted file mode 100644 index 9acab1982..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/benches/workspaces/toml-rs.tgz and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/build.rs b/collector/compile-benchmarks/cargo-0.60.0/build.rs deleted file mode 100644 index 68865b58f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/build.rs +++ /dev/null @@ -1,43 +0,0 @@ -use flate2::{Compression, GzBuilder}; -use std::ffi::OsStr; -use std::fs; -use std::path::Path; - -fn main() { - compress_man(); - println!( - "cargo:rustc-env=RUST_HOST_TARGET={}", - std::env::var("TARGET").unwrap() - ); -} - -fn compress_man() { - let out_path = Path::new(&std::env::var("OUT_DIR").unwrap()).join("man.tgz"); - let dst = 
fs::File::create(out_path).unwrap(); - let encoder = GzBuilder::new() - .filename("man.tar") - .write(dst, Compression::best()); - let mut ar = tar::Builder::new(encoder); - ar.mode(tar::HeaderMode::Deterministic); - - let mut add_files = |dir, extension| { - let mut files = fs::read_dir(dir) - .unwrap() - .map(|e| e.unwrap().path()) - .collect::>(); - files.sort(); - for path in files { - if path.extension() != Some(extension) { - continue; - } - println!("cargo:rerun-if-changed={}", path.display()); - ar.append_path_with_name(&path, path.file_name().unwrap()) - .unwrap(); - } - }; - - add_files(Path::new("src/etc/man"), OsStr::new("1")); - add_files(Path::new("src/doc/man/generated_txt"), OsStr::new("txt")); - let encoder = ar.into_inner().unwrap(); - encoder.finish().unwrap(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/ci/fetch-smoke-test.sh b/collector/compile-benchmarks/cargo-0.60.0/ci/fetch-smoke-test.sh deleted file mode 100755 index 17993d1fd..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/ci/fetch-smoke-test.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# This script builds with static curl, and verifies that fetching works. - -set -ex - -if [[ -z "$RUNNER_TEMP" ]] -then - echo "RUNNER_TEMP must be set" - exit 1 -fi - -if [ ! -f Cargo.toml ]; then - echo "Must be run from root of project." - exit 1 -fi - - -# Building openssl on Windows is a pain. -if [[ $(rustc -Vv | grep host:) != *windows* ]]; then - FEATURES='vendored-openssl,curl-sys/static-curl,curl-sys/force-system-lib-on-osx' - export LIBZ_SYS_STATIC=1 -fi - -cargo build --features "$FEATURES" -export CARGO_HOME=$RUNNER_TEMP/chome -target/debug/cargo fetch -rm -rf $CARGO_HOME diff --git a/collector/compile-benchmarks/cargo-0.60.0/ci/validate-man.sh b/collector/compile-benchmarks/cargo-0.60.0/ci/validate-man.sh deleted file mode 100755 index 92df49781..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/ci/validate-man.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -# This script validates that there aren't any changes to the man pages. - -set -e - -cd src/doc - -changes=$(git status --porcelain) -if [ -n "$changes" ] -then - echo "git directory must be clean before running this script." - exit 1 -fi - -./build-man.sh - -changes=$(git status --porcelain) -if [ -n "$changes" ] -then - echo "Detected changes in man pages:" - echo "$changes" - echo - echo "Please run './build-man.sh' in the src/doc directory to rebuild the" - echo "man pages, and commit the changes." - exit 1 -fi diff --git a/collector/compile-benchmarks/cargo-0.60.0/crates/credential/README.md b/collector/compile-benchmarks/cargo-0.60.0/crates/credential/README.md deleted file mode 100644 index 168cc71c3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/crates/credential/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# Cargo Credential Packages - -This directory contains Cargo packages for handling storage of tokens in a -secure manner. - -`cargo-credential` is a generic library to assist writing a credential -process. The other directories contain implementations that integrate with -specific credential systems. 
diff --git a/collector/compile-benchmarks/cargo-0.60.0/perf-config.json b/collector/compile-benchmarks/cargo-0.60.0/perf-config.json deleted file mode 100644 index 57ea68054..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/perf-config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "cargo_opts": "--lib", - "runs": 1, - "touch_file": "src/cargo/lib.rs", - "category": "primary", - "artifact": "library" -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/publish.py b/collector/compile-benchmarks/cargo-0.60.0/publish.py deleted file mode 100755 index 5ace18f72..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/publish.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python3 - -# This script is used to publish Cargo to crates.io. - -import os -import re -import subprocess -import time -import urllib.request -from urllib.error import HTTPError - - -TO_PUBLISH = [ - 'crates/cargo-platform', - 'crates/cargo-util', - 'crates/crates-io', - '.', -] - - -def already_published(name, version): - try: - urllib.request.urlopen('https://crates.io/api/v1/crates/%s/%s/download' % (name, version)) - except HTTPError as e: - if e.code == 404: - return False - raise - return True - - -def maybe_publish(path): - content = open(os.path.join(path, 'Cargo.toml')).read() - name = re.search('^name = "([^"]+)"', content, re.M).group(1) - version = re.search('^version = "([^"]+)"', content, re.M).group(1) - if already_published(name, version): - print('%s %s is already published, skipping' % (name, version)) - return False - subprocess.check_call(['cargo', 'publish', '--no-verify'], cwd=path) - return True - - -def main(): - print('Starting publish...') - for i, path in enumerate(TO_PUBLISH): - if maybe_publish(path): - if i < len(TO_PUBLISH)-1: - # Sleep to allow the index to update. This should probably - # check that the index is updated, or use a retry loop - # instead. - time.sleep(5) - print('Publish complete!') - - -if __name__ == '__main__': - main() diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/cli.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/cli.rs deleted file mode 100644 index cd1fea8f1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/cli.rs +++ /dev/null @@ -1,485 +0,0 @@ -use anyhow::anyhow; -use cargo::core::{features, CliUnstable}; -use cargo::{self, drop_print, drop_println, CliResult, Config}; -use clap::{AppSettings, Arg, ArgMatches}; -use itertools::Itertools; -use std::collections::HashMap; -use std::fmt::Write; - -use super::commands; -use super::list_commands; -use crate::command_prelude::*; -use cargo::core::features::HIDDEN; - -lazy_static::lazy_static! { - // Maps from commonly known external commands (not builtin to cargo) to their - // description, for the help page. Reserved for external subcommands that are - // core within the rust ecosystem (esp ones that might become internal in the future). - static ref KNOWN_EXTERNAL_COMMAND_DESCRIPTIONS: HashMap<&'static str, &'static str> = HashMap::from([ - ("clippy", "Checks a package to catch common mistakes and improve your Rust code."), - ("fmt", "Formats all bin and lib files of the current crate using rustfmt."), - ]); -} - -pub fn main(config: &mut Config) -> CliResult { - // CAUTION: Be careful with using `config` until it is configured below. - // In general, try to avoid loading config values unless necessary (like - // the [alias] table). 
- - if commands::help::handle_embedded_help(config) { - return Ok(()); - } - - let args = match cli().get_matches_safe() { - Ok(args) => args, - Err(e) => { - if e.kind == clap::ErrorKind::UnrecognizedSubcommand { - // An unrecognized subcommand might be an external subcommand. - let cmd = &e.info.as_ref().unwrap()[0].to_owned(); - return super::execute_external_subcommand(config, cmd, &[cmd, "--help"]) - .map_err(|_| e.into()); - } else { - return Err(e.into()); - } - } - }; - - // Global args need to be extracted before expanding aliases because the - // clap code for extracting a subcommand discards global options - // (appearing before the subcommand). - let (expanded_args, global_args) = expand_aliases(config, args, vec![])?; - - if expanded_args.value_of("unstable-features") == Some("help") { - let options = CliUnstable::help(); - let non_hidden_options: Vec<(String, String)> = options - .iter() - .filter(|(_, help_message)| *help_message != HIDDEN) - .map(|(name, help)| (name.to_string(), help.to_string())) - .collect(); - let longest_option = non_hidden_options - .iter() - .map(|(option_name, _)| option_name.len()) - .max() - .unwrap_or(0); - let help_lines: Vec = non_hidden_options - .iter() - .map(|(option_name, option_help_message)| { - let option_name_kebab_case = option_name.replace("_", "-"); - let padding = " ".repeat(longest_option - option_name.len()); // safe to subtract - format!( - " -Z {}{} -- {}", - option_name_kebab_case, padding, option_help_message - ) - }) - .collect(); - let joined = help_lines.join("\n"); - drop_println!( - config, - " -Available unstable (nightly-only) flags: - -{} - -Run with 'cargo -Z [FLAG] [SUBCOMMAND]'", - joined - ); - if !config.nightly_features_allowed { - drop_println!( - config, - "\nUnstable flags are only available on the nightly channel \ - of Cargo, but this is the `{}` channel.\n\ - {}", - features::channel(), - features::SEE_CHANNELS - ); - } - drop_println!( - config, - "\nSee https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ - for more information about these flags." 
- ); - return Ok(()); - } - - let is_verbose = expanded_args.occurrences_of("verbose") > 0; - if expanded_args.is_present("version") { - let version = get_version_string(is_verbose); - drop_print!(config, "{}", version); - return Ok(()); - } - - if let Some(code) = expanded_args.value_of("explain") { - let mut procss = config.load_global_rustc(None)?.process(); - procss.arg("--explain").arg(code).exec()?; - return Ok(()); - } - - if expanded_args.is_present("list") { - drop_println!(config, "Installed Commands:"); - for (name, command) in list_commands(config) { - let known_external_desc = KNOWN_EXTERNAL_COMMAND_DESCRIPTIONS.get(name.as_str()); - match command { - CommandInfo::BuiltIn { about } => { - assert!( - known_external_desc.is_none(), - "KNOWN_EXTERNAL_COMMANDS shouldn't contain builtin \"{}\"", - name - ); - let summary = about.unwrap_or_default(); - let summary = summary.lines().next().unwrap_or(&summary); // display only the first line - drop_println!(config, " {:<20} {}", name, summary); - } - CommandInfo::External { path } => { - if let Some(desc) = known_external_desc { - drop_println!(config, " {:<20} {}", name, desc); - } else if is_verbose { - drop_println!(config, " {:<20} {}", name, path.display()); - } else { - drop_println!(config, " {}", name); - } - } - CommandInfo::Alias { target } => { - drop_println!( - config, - " {:<20} alias: {}", - name, - target.iter().join(" ") - ); - } - } - } - return Ok(()); - } - - let (cmd, subcommand_args) = match expanded_args.subcommand() { - (cmd, Some(args)) => (cmd, args), - _ => { - // No subcommand provided. - cli().print_help()?; - return Ok(()); - } - }; - config_configure(config, &expanded_args, subcommand_args, global_args)?; - super::init_git_transports(config); - - execute_subcommand(config, cmd, subcommand_args) -} - -pub fn get_version_string(is_verbose: bool) -> String { - let version = cargo::version(); - let mut version_string = format!("cargo {}\n", version); - if is_verbose { - version_string.push_str(&format!("release: {}\n", version.version,)); - if let Some(ref cfg) = version.cfg_info { - if let Some(ref ci) = cfg.commit_info { - version_string.push_str(&format!("commit-hash: {}\n", ci.commit_hash)); - version_string.push_str(&format!("commit-date: {}\n", ci.commit_date)); - } - } - writeln!(version_string, "host: {}", env!("RUST_HOST_TARGET")).unwrap(); - add_libgit2(&mut version_string); - add_curl(&mut version_string); - add_ssl(&mut version_string); - writeln!(version_string, "os: {}", os_info::get()).unwrap(); - } - version_string -} - -fn add_libgit2(version_string: &mut String) { - let git2_v = git2::Version::get(); - let lib_v = git2_v.libgit2_version(); - let vendored = if git2_v.vendored() { - format!("vendored") - } else { - format!("system") - }; - writeln!( - version_string, - "libgit2: {}.{}.{} (sys:{} {})", - lib_v.0, - lib_v.1, - lib_v.2, - git2_v.crate_version(), - vendored - ) - .unwrap(); -} - -fn add_curl(version_string: &mut String) { - let curl_v = curl::Version::get(); - let vendored = if curl_v.vendored() { - format!("vendored") - } else { - format!("system") - }; - writeln!( - version_string, - "libcurl: {} (sys:{} {} ssl:{})", - curl_v.version(), - curl_sys::rust_crate_version(), - vendored, - curl_v.ssl_version().unwrap_or("none") - ) - .unwrap(); -} - -fn add_ssl(version_string: &mut String) { - #[cfg(feature = "openssl")] - { - writeln!(version_string, "ssl: {}", openssl::version::version()).unwrap(); - } - #[cfg(not(feature = "openssl"))] - { - let _ = version_string; // Silence 
unused warning. - } -} - -fn expand_aliases( - config: &mut Config, - args: ArgMatches<'static>, - mut already_expanded: Vec, -) -> Result<(ArgMatches<'static>, GlobalArgs), CliError> { - if let (cmd, Some(args)) = args.subcommand() { - match ( - commands::builtin_exec(cmd), - super::aliased_command(config, cmd)?, - ) { - (Some(_), Some(_)) => { - // User alias conflicts with a built-in subcommand - config.shell().warn(format!( - "user-defined alias `{}` is ignored, because it is shadowed by a built-in command", - cmd, - ))?; - } - (Some(_), None) => { - // Command is built-in and is not conflicting with alias, but contains ignored values. - if let Some(mut values) = args.values_of("") { - config.shell().warn(format!( - "trailing arguments after built-in command `{}` are ignored: `{}`", - cmd, - values.join(" "), - ))?; - } - } - (None, None) => {} - (_, Some(mut alias)) => { - // Check if this alias is shadowing an external subcommand - // (binary of the form `cargo-`) - // Currently this is only a warning, but after a transition period this will become - // a hard error. - if let Some(path) = super::find_external_subcommand(config, cmd) { - config.shell().warn(format!( - "\ -user-defined alias `{}` is shadowing an external subcommand found at: `{}` -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #10049 .", - cmd, - path.display(), - ))?; - } - - alias.extend( - args.values_of("") - .unwrap_or_default() - .map(|s| s.to_string()), - ); - // new_args strips out everything before the subcommand, so - // capture those global options now. - // Note that an alias to an external command will not receive - // these arguments. That may be confusing, but such is life. - let global_args = GlobalArgs::new(args); - let new_args = cli() - .setting(AppSettings::NoBinaryName) - .get_matches_from_safe(alias)?; - - let (new_cmd, _) = new_args.subcommand(); - already_expanded.push(cmd.to_string()); - if already_expanded.contains(&new_cmd.to_string()) { - // Crash if the aliases are corecursive / unresolvable - return Err(anyhow!( - "alias {} has unresolvable recursive definition: {} -> {}", - already_expanded[0], - already_expanded.join(" -> "), - new_cmd, - ) - .into()); - } - - let (expanded_args, _) = expand_aliases(config, new_args, already_expanded)?; - return Ok((expanded_args, global_args)); - } - } - }; - - Ok((args, GlobalArgs::default())) -} - -fn config_configure( - config: &mut Config, - args: &ArgMatches<'_>, - subcommand_args: &ArgMatches<'_>, - global_args: GlobalArgs, -) -> CliResult { - let arg_target_dir = &subcommand_args.value_of_path("target-dir", config); - let verbose = global_args.verbose + args.occurrences_of("verbose") as u32; - // quiet is unusual because it is redefined in some subcommands in order - // to provide custom help text. - let quiet = - args.is_present("quiet") || subcommand_args.is_present("quiet") || global_args.quiet; - let global_color = global_args.color; // Extract so it can take reference. 
- let color = args.value_of("color").or_else(|| global_color.as_deref()); - let frozen = args.is_present("frozen") || global_args.frozen; - let locked = args.is_present("locked") || global_args.locked; - let offline = args.is_present("offline") || global_args.offline; - let mut unstable_flags = global_args.unstable_flags; - if let Some(values) = args.values_of("unstable-features") { - unstable_flags.extend(values.map(|s| s.to_string())); - } - let mut config_args = global_args.config_args; - if let Some(values) = args.values_of("config") { - config_args.extend(values.map(|s| s.to_string())); - } - config.configure( - verbose, - quiet, - color, - frozen, - locked, - offline, - arg_target_dir, - &unstable_flags, - &config_args, - )?; - Ok(()) -} - -fn execute_subcommand( - config: &mut Config, - cmd: &str, - subcommand_args: &ArgMatches<'_>, -) -> CliResult { - if let Some(exec) = commands::builtin_exec(cmd) { - return exec(config, subcommand_args); - } - - let mut ext_args: Vec<&str> = vec![cmd]; - ext_args.extend(subcommand_args.values_of("").unwrap_or_default()); - super::execute_external_subcommand(config, cmd, &ext_args) -} - -#[derive(Default)] -struct GlobalArgs { - verbose: u32, - quiet: bool, - color: Option, - frozen: bool, - locked: bool, - offline: bool, - unstable_flags: Vec, - config_args: Vec, -} - -impl GlobalArgs { - fn new(args: &ArgMatches<'_>) -> GlobalArgs { - GlobalArgs { - verbose: args.occurrences_of("verbose") as u32, - quiet: args.is_present("quiet"), - color: args.value_of("color").map(|s| s.to_string()), - frozen: args.is_present("frozen"), - locked: args.is_present("locked"), - offline: args.is_present("offline"), - unstable_flags: args - .values_of_lossy("unstable-features") - .unwrap_or_default(), - config_args: args - .values_of("config") - .unwrap_or_default() - .map(|s| s.to_string()) - .collect(), - } - } -} - -fn cli() -> App { - let is_rustup = std::env::var_os("RUSTUP_HOME").is_some(); - let usage = if is_rustup { - "cargo [+toolchain] [OPTIONS] [SUBCOMMAND]" - } else { - "cargo [OPTIONS] [SUBCOMMAND]" - }; - App::new("cargo") - .settings(&[ - AppSettings::UnifiedHelpMessage, - AppSettings::DeriveDisplayOrder, - AppSettings::VersionlessSubcommands, - AppSettings::AllowExternalSubcommands, - ]) - .usage(usage) - .template( - "\ -Rust's package manager - -USAGE: - {usage} - -OPTIONS: -{unified} - -Some common cargo commands are (see all commands with --list): - build, b Compile the current package - check, c Analyze the current package and report errors, but don't build object files - clean Remove the target directory - doc, d Build this package's and its dependencies' documentation - new Create a new cargo package - init Create a new cargo package in an existing directory - run, r Run a binary or example of the local package - test, t Run the tests - bench Run the benchmarks - update Update dependencies listed in Cargo.lock - search Search registry for crates - publish Package and upload this package to the registry - install Install a Rust binary. 
Default location is $HOME/.cargo/bin - uninstall Uninstall a Rust binary - -See 'cargo help ' for more information on a specific command.\n", - ) - .arg(opt("version", "Print version info and exit").short("V")) - .arg(opt("list", "List installed commands")) - .arg(opt("explain", "Run `rustc --explain CODE`").value_name("CODE")) - .arg( - opt( - "verbose", - "Use verbose output (-vv very verbose/build.rs output)", - ) - .short("v") - .multiple(true) - .global(true), - ) - .arg_quiet() - .arg( - opt("color", "Coloring: auto, always, never") - .value_name("WHEN") - .global(true), - ) - .arg(opt("frozen", "Require Cargo.lock and cache are up to date").global(true)) - .arg(opt("locked", "Require Cargo.lock is up to date").global(true)) - .arg(opt("offline", "Run without accessing the network").global(true)) - .arg( - multi_opt( - "config", - "KEY=VALUE", - "Override a configuration value (unstable)", - ) - .global(true), - ) - .arg( - Arg::with_name("unstable-features") - .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details") - .short("Z") - .value_name("FLAG") - .multiple(true) - .number_of_values(1) - .global(true), - ) - .subcommands(commands::builtin()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/bench.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/bench.rs deleted file mode 100644 index 39ec1be22..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/bench.rs +++ /dev/null @@ -1,83 +0,0 @@ -use crate::command_prelude::*; -use cargo::ops::{self, TestOptions}; - -pub fn cli() -> App { - subcommand("bench") - .setting(AppSettings::TrailingVarArg) - .about("Execute all benchmarks of a local package") - .arg_quiet() - .arg( - Arg::with_name("BENCHNAME") - .help("If specified, only run benches containing this string in their names"), - ) - .arg( - Arg::with_name("args") - .help("Arguments for the bench binary") - .multiple(true) - .last(true), - ) - .arg_targets_all( - "Benchmark only this package's library", - "Benchmark only the specified binary", - "Benchmark all binaries", - "Benchmark only the specified example", - "Benchmark all examples", - "Benchmark only the specified test target", - "Benchmark all tests", - "Benchmark only the specified bench target", - "Benchmark all benches", - "Benchmark all targets", - ) - .arg(opt("no-run", "Compile, but don't run benchmarks")) - .arg_package_spec( - "Package to run benchmarks for", - "Benchmark all packages in the workspace", - "Exclude packages from the benchmark", - ) - .arg_jobs() - .arg_profile("Build artifacts with the specified profile") - .arg_features() - .arg_target_triple("Build for the target triple") - .arg_target_dir() - .arg_manifest_path() - .arg_ignore_rust_version() - .arg_message_format() - .arg(opt( - "no-fail-fast", - "Run all benchmarks regardless of failure", - )) - .arg_unit_graph() - .after_help("Run `cargo help bench` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - let mut compile_opts = args.compile_options( - config, - CompileMode::Bench, - Some(&ws), - ProfileChecking::Custom, - )?; - - compile_opts.build_config.requested_profile = - args.get_profile_name(config, "bench", ProfileChecking::Custom)?; - - let ops = TestOptions { - no_run: args.is_present("no-run"), - no_fail_fast: args.is_present("no-fail-fast"), - compile_opts, - }; - - let bench_args = args.value_of("BENCHNAME").into_iter(); - let bench_args 
= bench_args.chain(args.values_of("args").unwrap_or_default()); - let bench_args = bench_args.collect::>(); - - let err = ops::run_benches(&ws, &ops, &bench_args)?; - match err { - None => Ok(()), - Some(err) => Err(match err.code { - Some(i) => CliError::new(anyhow::format_err!("bench failed"), i), - None => CliError::new(err.into(), 101), - }), - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/build.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/build.rs deleted file mode 100644 index ad6705119..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/build.rs +++ /dev/null @@ -1,72 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops; - -pub fn cli() -> App { - subcommand("build") - // subcommand aliases are handled in aliased_command() - // .alias("b") - .about("Compile a local package and all of its dependencies") - .arg_quiet() - .arg_package_spec( - "Package to build (see `cargo help pkgid`)", - "Build all packages in the workspace", - "Exclude packages from the build", - ) - .arg_jobs() - .arg_targets_all( - "Build only this package's library", - "Build only the specified binary", - "Build all binaries", - "Build only the specified example", - "Build all examples", - "Build only the specified test target", - "Build all tests", - "Build only the specified bench target", - "Build all benches", - "Build all targets", - ) - .arg_release("Build artifacts in release mode, with optimizations") - .arg_profile("Build artifacts with the specified profile") - .arg_features() - .arg_target_triple("Build for the target triple") - .arg_target_dir() - .arg( - opt( - "out-dir", - "Copy final artifacts to this directory (unstable)", - ) - .value_name("PATH"), - ) - .arg_manifest_path() - .arg_ignore_rust_version() - .arg_message_format() - .arg_build_plan() - .arg_unit_graph() - .arg_future_incompat_report() - .after_help("Run `cargo help build` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - let mut compile_opts = args.compile_options( - config, - CompileMode::Build, - Some(&ws), - ProfileChecking::Custom, - )?; - - if let Some(out_dir) = args.value_of_path("out-dir", config) { - compile_opts.build_config.export_dir = Some(out_dir); - } else if let Some(out_dir) = config.build_config()?.out_dir.as_ref() { - let out_dir = out_dir.resolve_path(config); - compile_opts.build_config.export_dir = Some(out_dir); - } - if compile_opts.build_config.export_dir.is_some() { - config - .cli_unstable() - .fail_if_stable_opt("--out-dir", 6790)?; - } - ops::compile(&ws, &compile_opts)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/check.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/check.rs deleted file mode 100644 index 3be146c6d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/check.rs +++ /dev/null @@ -1,52 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops; - -pub fn cli() -> App { - subcommand("check") - // subcommand aliases are handled in aliased_command() - // .alias("c") - .about("Check a local package and all of its dependencies for errors") - .arg_quiet() - .arg_package_spec( - "Package(s) to check", - "Check all packages in the workspace", - "Exclude packages from the check", - ) - .arg_jobs() - .arg_targets_all( - "Check only this package's library", - "Check only the specified binary", - "Check all 
binaries", - "Check only the specified example", - "Check all examples", - "Check only the specified test target", - "Check all tests", - "Check only the specified bench target", - "Check all benches", - "Check all targets", - ) - .arg_release("Check artifacts in release mode, with optimizations") - .arg_profile("Check artifacts with the specified profile") - .arg_features() - .arg_target_triple("Check for the target triple") - .arg_target_dir() - .arg_manifest_path() - .arg_ignore_rust_version() - .arg_message_format() - .arg_unit_graph() - .arg_future_incompat_report() - .after_help("Run `cargo help check` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - // This is a legacy behavior that causes `cargo check` to pass `--test`. - let test = matches!(args.value_of("profile"), Some("test")); - let mode = CompileMode::Check { test }; - let compile_opts = - args.compile_options(config, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?; - - ops::compile(&ws, &compile_opts)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/clean.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/clean.rs deleted file mode 100644 index c966c65f1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/clean.rs +++ /dev/null @@ -1,37 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops::{self, CleanOptions}; -use cargo::util::print_available_packages; - -pub fn cli() -> App { - subcommand("clean") - .about("Remove artifacts that cargo has generated in the past") - .arg_quiet() - .arg_package_spec_simple("Package to clean artifacts for") - .arg_manifest_path() - .arg_target_triple("Target triple to clean output for") - .arg_target_dir() - .arg_release("Whether or not to clean release artifacts") - .arg_profile("Clean artifacts of the specified profile") - .arg_doc("Whether or not to clean just the documentation directory") - .after_help("Run `cargo help clean` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - - if args.is_present_with_zero_values("package") { - print_available_packages(&ws)?; - } - - let opts = CleanOptions { - config, - spec: values(args, "package"), - targets: args.targets(), - requested_profile: args.get_profile_name(config, "dev", ProfileChecking::Custom)?, - profile_specified: args.is_present("profile") || args.is_present("release"), - doc: args.is_present("doc"), - }; - ops::clean(&ws, &opts)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/config.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/config.rs deleted file mode 100644 index 61938dfc2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/config.rs +++ /dev/null @@ -1,48 +0,0 @@ -use crate::command_prelude::*; -use cargo::ops::cargo_config; - -pub fn cli() -> App { - subcommand("config") - .about("Inspect configuration values") - .after_help("Run `cargo help config` for more detailed information.\n") - .setting(clap::AppSettings::SubcommandRequiredElseHelp) - .subcommand( - subcommand("get") - .arg(Arg::with_name("key").help("The config key to display")) - .arg( - opt("format", "Display format") - .possible_values(cargo_config::ConfigFormat::POSSIBLE_VALUES) - .default_value("toml"), - ) - .arg(opt( - "show-origin", - "Display where the config value is 
defined", - )) - .arg( - opt("merged", "Whether or not to merge config values") - .possible_values(&["yes", "no"]) - .default_value("yes"), - ), - ) -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - config - .cli_unstable() - .fail_if_stable_command(config, "config", 9301)?; - match args.subcommand() { - ("get", Some(args)) => { - let opts = cargo_config::GetOptions { - key: args.value_of("key"), - format: args.value_of("format").unwrap().parse()?, - show_origin: args.is_present("show-origin"), - merged: args.value_of("merged") == Some("yes"), - }; - cargo_config::get(config, &opts)?; - } - (cmd, _) => { - panic!("unexpected command `{}`", cmd) - } - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/doc.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/doc.rs deleted file mode 100644 index 21d561394..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/doc.rs +++ /dev/null @@ -1,57 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops::{self, DocOptions}; - -pub fn cli() -> App { - subcommand("doc") - // subcommand aliases are handled in aliased_command() - // .alias("d") - .about("Build a package's documentation") - .arg_quiet() - .arg(opt( - "open", - "Opens the docs in a browser after the operation", - )) - .arg_package_spec( - "Package to document", - "Document all packages in the workspace", - "Exclude packages from the build", - ) - .arg(opt("no-deps", "Don't build documentation for dependencies")) - .arg(opt("document-private-items", "Document private items")) - .arg_jobs() - .arg_targets_lib_bin_example( - "Document only this package's library", - "Document only the specified binary", - "Document all binaries", - "Document only the specified example", - "Document all examples", - ) - .arg_release("Build artifacts in release mode, with optimizations") - .arg_profile("Build artifacts with the specified profile") - .arg_features() - .arg_target_triple("Build for the target triple") - .arg_target_dir() - .arg_manifest_path() - .arg_message_format() - .arg_ignore_rust_version() - .arg_unit_graph() - .after_help("Run `cargo help doc` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - let mode = CompileMode::Doc { - deps: !args.is_present("no-deps"), - }; - let mut compile_opts = - args.compile_options(config, mode, Some(&ws), ProfileChecking::Custom)?; - compile_opts.rustdoc_document_private_items = args.is_present("document-private-items"); - - let doc_opts = DocOptions { - open_result: args.is_present("open"), - compile_opts, - }; - ops::doc(&ws, &doc_opts)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/fetch.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/fetch.rs deleted file mode 100644 index ff3fcbec5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/fetch.rs +++ /dev/null @@ -1,24 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops; -use cargo::ops::FetchOptions; - -pub fn cli() -> App { - subcommand("fetch") - .about("Fetch dependencies of a package from the network") - .arg_quiet() - .arg_manifest_path() - .arg_target_triple("Fetch dependencies for the target triple") - .after_help("Run `cargo help fetch` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - - let 
opts = FetchOptions { - config, - targets: args.targets(), - }; - let _ = ops::fetch(&ws, &opts)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/fix.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/fix.rs deleted file mode 100644 index 85cf955e4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/fix.rs +++ /dev/null @@ -1,103 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops::{self, CompileFilter, FilterRule, LibRule}; - -pub fn cli() -> App { - subcommand("fix") - .about("Automatically fix lint warnings reported by rustc") - .arg_quiet() - .arg_package_spec( - "Package(s) to fix", - "Fix all packages in the workspace", - "Exclude packages from the fixes", - ) - .arg_jobs() - .arg_targets_all( - "Fix only this package's library", - "Fix only the specified binary", - "Fix all binaries", - "Fix only the specified example", - "Fix all examples", - "Fix only the specified test target", - "Fix all tests", - "Fix only the specified bench target", - "Fix all benches", - "Fix all targets (default)", - ) - .arg_release("Fix artifacts in release mode, with optimizations") - .arg_profile("Build artifacts with the specified profile") - .arg_features() - .arg_target_triple("Fix for the target triple") - .arg_target_dir() - .arg_manifest_path() - .arg_message_format() - .arg( - Arg::with_name("broken-code") - .long("broken-code") - .help("Fix code even if it already has compiler errors"), - ) - .arg( - Arg::with_name("edition") - .long("edition") - .help("Fix in preparation for the next edition"), - ) - .arg( - Arg::with_name("idioms") - .long("edition-idioms") - .help("Fix warnings to migrate to the idioms of an edition"), - ) - .arg( - Arg::with_name("allow-no-vcs") - .long("allow-no-vcs") - .help("Fix code even if a VCS was not detected"), - ) - .arg( - Arg::with_name("allow-dirty") - .long("allow-dirty") - .help("Fix code even if the working directory is dirty"), - ) - .arg( - Arg::with_name("allow-staged") - .long("allow-staged") - .help("Fix code even if the working directory has staged changes"), - ) - .arg_ignore_rust_version() - .after_help("Run `cargo help fix` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - // This is a legacy behavior that causes `cargo fix` to pass `--test`. - let test = matches!(args.value_of("profile"), Some("test")); - let mode = CompileMode::Check { test }; - - // Unlike other commands default `cargo fix` to all targets to fix as much - // code as we can. - let mut opts = - args.compile_options(config, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?; - - if let CompileFilter::Default { .. 
} = opts.filter { - opts.filter = CompileFilter::Only { - all_targets: true, - lib: LibRule::Default, - bins: FilterRule::All, - examples: FilterRule::All, - benches: FilterRule::All, - tests: FilterRule::All, - } - } - - ops::fix( - &ws, - &mut ops::FixOptions { - edition: args.is_present("edition"), - idioms: args.is_present("idioms"), - compile_opts: opts, - allow_dirty: args.is_present("allow-dirty"), - allow_no_vcs: args.is_present("allow-no-vcs"), - allow_staged: args.is_present("allow-staged"), - broken_code: args.is_present("broken-code"), - }, - )?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/generate_lockfile.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/generate_lockfile.rs deleted file mode 100644 index 1eebdbd41..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/generate_lockfile.rs +++ /dev/null @@ -1,17 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops; - -pub fn cli() -> App { - subcommand("generate-lockfile") - .about("Generate the lockfile for a package") - .arg_quiet() - .arg_manifest_path() - .after_help("Run `cargo help generate-lockfile` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - ops::generate_lockfile(&ws)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/git_checkout.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/git_checkout.rs deleted file mode 100644 index aae435a51..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/git_checkout.rs +++ /dev/null @@ -1,14 +0,0 @@ -use crate::command_prelude::*; - -const REMOVED: &str = "The `git-checkout` subcommand has been removed."; - -pub fn cli() -> App { - subcommand("git-checkout") - .about("This subcommand has been removed") - .settings(&[AppSettings::Hidden]) - .help(REMOVED) -} - -pub fn exec(_config: &mut Config, _args: &ArgMatches<'_>) -> CliResult { - Err(anyhow::format_err!(REMOVED).into()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/help.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/help.rs deleted file mode 100644 index 16b7e71e6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/help.rs +++ /dev/null @@ -1,148 +0,0 @@ -use crate::aliased_command; -use cargo::util::errors::CargoResult; -use cargo::{drop_println, Config}; -use cargo_util::paths::resolve_executable; -use flate2::read::GzDecoder; -use std::ffi::OsString; -use std::io::Read; -use std::io::Write; -use std::path::Path; - -const COMPRESSED_MAN: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/man.tgz")); - -/// Checks if the `help` command is being issued. -/// -/// This runs before clap processing, because it needs to intercept the `help` -/// command if a man page is available. -/// -/// Returns `true` if help information was successfully displayed to the user. -/// In this case, Cargo should exit. 
-pub fn handle_embedded_help(config: &Config) -> bool {
-    match try_help(config) {
-        Ok(true) => true,
-        Ok(false) => false,
-        Err(e) => {
-            log::warn!("help failed: {:?}", e);
-            false
-        }
-    }
-}
-
-fn try_help(config: &Config) -> CargoResult<bool> {
-    let mut args = std::env::args_os()
-        .skip(1)
-        .skip_while(|arg| arg.to_str().map_or(false, |s| s.starts_with('-')));
-    if !args
-        .next()
-        .map_or(false, |arg| arg.to_str() == Some("help"))
-    {
-        return Ok(false);
-    }
-    let subcommand = match args.next() {
-        Some(arg) => arg,
-        None => return Ok(false),
-    };
-    let subcommand = match subcommand.to_str() {
-        Some(s) => s,
-        None => return Ok(false),
-    };
-
-    let subcommand = match check_alias(config, subcommand) {
-        // If this alias is more than a simple subcommand pass-through, show the alias.
-        Some(argv) if argv.len() > 1 => {
-            let alias = argv.join(" ");
-            drop_println!(config, "`{}` is aliased to `{}`", subcommand, alias);
-            return Ok(true);
-        }
-        // Otherwise, resolve the alias into its subcommand.
-        Some(argv) => {
-            // An alias with an empty argv can be created via `"empty-alias" = ""`.
-            let first = argv.get(0).map(String::as_str).unwrap_or(subcommand);
-            first.to_string()
-        }
-        None => subcommand.to_string(),
-    };
-
-    let subcommand = match check_builtin(&subcommand) {
-        Some(s) => s,
-        None => return Ok(false),
-    };
-
-    if resolve_executable(Path::new("man")).is_ok() {
-        let man = match extract_man(subcommand, "1") {
-            Some(man) => man,
-            None => return Ok(false),
-        };
-        write_and_spawn(subcommand, &man, "man")?;
-    } else {
-        let txt = match extract_man(subcommand, "txt") {
-            Some(txt) => txt,
-            None => return Ok(false),
-        };
-        if resolve_executable(Path::new("less")).is_ok() {
-            write_and_spawn(subcommand, &txt, "less")?;
-        } else if resolve_executable(Path::new("more")).is_ok() {
-            write_and_spawn(subcommand, &txt, "more")?;
-        } else {
-            drop(std::io::stdout().write_all(&txt));
-        }
-    }
-    Ok(true)
-}
-
-/// Checks if the given subcommand is an alias.
-///
-/// Returns None if it is not an alias.
-fn check_alias(config: &Config, subcommand: &str) -> Option<Vec<String>> {
-    aliased_command(config, subcommand).ok().flatten()
-}
-
-/// Checks if the given subcommand is a built-in command (not via an alias).
-///
-/// Returns None if it is not a built-in command.
-fn check_builtin(subcommand: &str) -> Option<&str> {
-    super::builtin_exec(subcommand).map(|_| subcommand)
-}
-
-/// Extracts the given man page from the compressed archive.
-///
-/// Returns None if the command wasn't found.
-fn extract_man(subcommand: &str, extension: &str) -> Option<Vec<u8>> {
-    let extract_name = OsString::from(format!("cargo-{}.{}", subcommand, extension));
-    let gz = GzDecoder::new(COMPRESSED_MAN);
-    let mut ar = tar::Archive::new(gz);
-    // Unwraps should be safe here, since this is a static archive generated
-    // by our build script. It should never be an invalid format!
-    for entry in ar.entries().unwrap() {
-        let mut entry = entry.unwrap();
-        let path = entry.path().unwrap();
-        if path.file_name().unwrap() != extract_name {
-            continue;
-        }
-        let mut result = Vec::new();
-        entry.read_to_end(&mut result).unwrap();
-        return Some(result);
-    }
-    None
-}
-
-/// Write the contents of a man page to disk and spawn the given command to
-/// display it.
-fn write_and_spawn(name: &str, contents: &[u8], command: &str) -> CargoResult<()> { - let prefix = format!("cargo-{}.", name); - let mut tmp = tempfile::Builder::new().prefix(&prefix).tempfile()?; - let f = tmp.as_file_mut(); - f.write_all(contents)?; - f.flush()?; - let path = tmp.path(); - // Use a path relative to the temp directory so that it can work on - // cygwin/msys systems which don't handle windows-style paths. - let mut relative_name = std::ffi::OsString::from("./"); - relative_name.push(path.file_name().unwrap()); - let mut cmd = std::process::Command::new(command) - .arg(relative_name) - .current_dir(path.parent().unwrap()) - .spawn()?; - drop(cmd.wait()); - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/init.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/init.rs deleted file mode 100644 index 257d30756..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/init.rs +++ /dev/null @@ -1,22 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops; - -pub fn cli() -> App { - subcommand("init") - .about("Create a new cargo package in an existing directory") - .arg_quiet() - .arg(Arg::with_name("path").default_value(".")) - .arg(opt("registry", "Registry to use").value_name("REGISTRY")) - .arg_new_opts() - .after_help("Run `cargo help init` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let opts = args.new_options(config)?; - let project_kind = ops::init(&opts, config)?; - config - .shell() - .status("Created", format!("{} package", project_kind))?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/install.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/install.rs deleted file mode 100644 index 79be166e4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/install.rs +++ /dev/null @@ -1,157 +0,0 @@ -use crate::command_prelude::*; - -use cargo::core::{GitReference, SourceId}; -use cargo::ops; -use cargo::util::IntoUrl; - -pub fn cli() -> App { - subcommand("install") - .about("Install a Rust binary. 
Default location is $HOME/.cargo/bin") - .arg_quiet() - .arg(Arg::with_name("crate").empty_values(false).multiple(true)) - .arg( - opt("version", "Specify a version to install") - .alias("vers") - .value_name("VERSION") - .requires("crate"), - ) - .arg( - opt("git", "Git URL to install the specified crate from") - .value_name("URL") - .conflicts_with_all(&["path", "index", "registry"]), - ) - .arg( - opt("branch", "Branch to use when installing from git") - .value_name("BRANCH") - .requires("git"), - ) - .arg( - opt("tag", "Tag to use when installing from git") - .value_name("TAG") - .requires("git"), - ) - .arg( - opt("rev", "Specific commit to use when installing from git") - .value_name("SHA") - .requires("git"), - ) - .arg( - opt("path", "Filesystem path to local crate to install") - .value_name("PATH") - .conflicts_with_all(&["git", "index", "registry"]), - ) - .arg(opt( - "list", - "list all installed packages and their versions", - )) - .arg_jobs() - .arg(opt("force", "Force overwriting existing crates or binaries").short("f")) - .arg(opt("no-track", "Do not save tracking information")) - .arg_features() - .arg_profile("Install artifacts with the specified profile") - .arg(opt("debug", "Build in debug mode instead of release mode")) - .arg_targets_bins_examples( - "Install only the specified binary", - "Install all binaries", - "Install only the specified example", - "Install all examples", - ) - .arg_target_triple("Build for the target triple") - .arg_target_dir() - .arg(opt("root", "Directory to install packages into").value_name("DIR")) - .arg( - opt("index", "Registry index to install from") - .value_name("INDEX") - .requires("crate") - .conflicts_with_all(&["git", "path", "registry"]), - ) - .arg( - opt("registry", "Registry to use") - .value_name("REGISTRY") - .requires("crate") - .conflicts_with_all(&["git", "path", "index"]), - ) - .arg_message_format() - .after_help("Run `cargo help install` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - if let Some(path) = args.value_of_path("path", config) { - config.reload_rooted_at(path)?; - } else { - // TODO: Consider calling set_search_stop_path(home). - config.reload_rooted_at(config.home().clone().into_path_unlocked())?; - } - - let krates = args - .values_of("crate") - .unwrap_or_default() - .collect::>(); - - let mut from_cwd = false; - - let source = if let Some(url) = args.value_of("git") { - let url = url.into_url()?; - let gitref = if let Some(branch) = args.value_of("branch") { - GitReference::Branch(branch.to_string()) - } else if let Some(tag) = args.value_of("tag") { - GitReference::Tag(tag.to_string()) - } else if let Some(rev) = args.value_of("rev") { - GitReference::Rev(rev.to_string()) - } else { - GitReference::DefaultBranch - }; - SourceId::for_git(&url, gitref)? - } else if let Some(path) = args.value_of_path("path", config) { - SourceId::for_path(&path)? - } else if krates.is_empty() { - from_cwd = true; - SourceId::for_path(config.cwd())? - } else if let Some(registry) = args.registry(config)? { - SourceId::alt_registry(config, ®istry)? - } else if let Some(index) = args.value_of("index") { - SourceId::for_registry(&index.into_url()?)? - } else { - SourceId::crates_io(config)? - }; - - let version = args.value_of("version"); - let root = args.value_of("root"); - - // We only provide workspace information for local crate installation from - // one of the following sources: - // - From current working directory (only work for edition 2015). 
- // - From a specific local file path. - let workspace = if from_cwd || args.is_present("path") { - args.workspace(config).ok() - } else { - None - }; - - let mut compile_opts = args.compile_options( - config, - CompileMode::Build, - workspace.as_ref(), - ProfileChecking::Custom, - )?; - - compile_opts.build_config.requested_profile = - args.get_profile_name(config, "release", ProfileChecking::Custom)?; - - if args.is_present("list") { - ops::install_list(root, config)?; - } else { - ops::install( - config, - root, - krates, - source, - from_cwd, - version, - &compile_opts, - args.is_present("force"), - args.is_present("no-track"), - )?; - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/locate_project.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/locate_project.rs deleted file mode 100644 index a045e1454..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/locate_project.rs +++ /dev/null @@ -1,93 +0,0 @@ -use crate::command_prelude::*; -use anyhow::bail; -use cargo::{drop_println, CargoResult}; -use serde::Serialize; - -pub fn cli() -> App { - subcommand("locate-project") - .about("Print a JSON representation of a Cargo.toml file's location") - .arg_quiet() - .arg_manifest_path() - .arg( - opt( - "message-format", - "Output representation [possible values: json, plain]", - ) - .value_name("FMT"), - ) - .arg(opt("workspace", "Locate Cargo.toml of the workspace root")) - .after_help("Run `cargo help locate-project` for more detailed information.\n") -} - -#[derive(Serialize)] -pub struct ProjectLocation<'a> { - root: &'a str, -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let root_manifest; - let workspace; - let root = match WhatToFind::parse(args) { - WhatToFind::CurrentManifest => { - root_manifest = args.root_manifest(config)?; - &root_manifest - } - WhatToFind::Workspace => { - workspace = args.workspace(config)?; - workspace.root_manifest() - } - }; - - let root = root - .to_str() - .ok_or_else(|| { - anyhow::format_err!( - "your package path contains characters \ - not representable in Unicode" - ) - }) - .map_err(|e| CliError::new(e, 1))?; - - let location = ProjectLocation { root }; - - match MessageFormat::parse(args)? 
{ - MessageFormat::Json => config.shell().print_json(&location)?, - MessageFormat::Plain => drop_println!(config, "{}", location.root), - } - - Ok(()) -} - -enum WhatToFind { - CurrentManifest, - Workspace, -} - -impl WhatToFind { - fn parse(args: &ArgMatches<'_>) -> Self { - if args.is_present("workspace") { - WhatToFind::Workspace - } else { - WhatToFind::CurrentManifest - } - } -} - -enum MessageFormat { - Json, - Plain, -} - -impl MessageFormat { - fn parse(args: &ArgMatches<'_>) -> CargoResult { - let fmt = match args.value_of("message-format") { - Some(fmt) => fmt, - None => return Ok(MessageFormat::Json), - }; - match fmt.to_ascii_lowercase().as_str() { - "json" => Ok(MessageFormat::Json), - "plain" => Ok(MessageFormat::Plain), - s => bail!("invalid message format specifier: `{}`", s), - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/login.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/login.rs deleted file mode 100644 index 0f51d5296..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/login.rs +++ /dev/null @@ -1,24 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops; - -pub fn cli() -> App { - subcommand("login") - .about( - "Save an api token from the registry locally. \ - If token is not specified, it will be read from stdin.", - ) - .arg_quiet() - .arg(Arg::with_name("token")) - .arg(opt("registry", "Registry to use").value_name("REGISTRY")) - .after_help("Run `cargo help login` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - ops::registry_login( - config, - args.value_of("token").map(String::from), - args.value_of("registry").map(String::from), - )?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/logout.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/logout.rs deleted file mode 100644 index 4ce9498e7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/logout.rs +++ /dev/null @@ -1,21 +0,0 @@ -use crate::command_prelude::*; -use cargo::ops; - -pub fn cli() -> App { - subcommand("logout") - .about("Remove an API token from the registry locally") - .arg_quiet() - .arg(opt("registry", "Registry to use").value_name("REGISTRY")) - .after_help("Run `cargo help logout` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - if !config.cli_unstable().credential_process { - config - .cli_unstable() - .fail_if_stable_command(config, "logout", 8933)?; - } - config.load_credentials()?; - ops::registry_logout(config, args.value_of("registry").map(String::from))?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/metadata.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/metadata.rs deleted file mode 100644 index 66e856b48..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/metadata.rs +++ /dev/null @@ -1,56 +0,0 @@ -use crate::command_prelude::*; -use cargo::ops::{self, OutputMetadataOptions}; - -pub fn cli() -> App { - subcommand("metadata") - .about( - "Output the resolved dependencies of a package, \ - the concrete used versions including overrides, \ - in machine-readable format", - ) - .arg_quiet() - .arg_features() - .arg(multi_opt( - "filter-platform", - "TRIPLE", - "Only include resolve dependencies matching the given target-triple", - )) - .arg(opt( - "no-deps", - "Output 
information only about the workspace members \
-             and don't fetch dependencies",
-        ))
-        .arg_manifest_path()
-        .arg(
-            opt("format-version", "Format version")
-                .value_name("VERSION")
-                .possible_value("1"),
-        )
-        .after_help("Run `cargo help metadata` for more detailed information.\n")
-}
-
-pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
-    let ws = args.workspace(config)?;
-
-    let version = match args.value_of("format-version") {
-        None => {
-            config.shell().warn(
-                "please specify `--format-version` flag explicitly \
-                 to avoid compatibility problems",
-            )?;
-            1
-        }
-        Some(version) => version.parse().unwrap(),
-    };
-
-    let options = OutputMetadataOptions {
-        cli_features: args.cli_features()?,
-        no_deps: args.is_present("no-deps"),
-        filter_platforms: args._values_of("filter-platform"),
-        version,
-    };
-
-    let result = ops::output_metadata(&ws, &options)?;
-    config.shell().print_json(&result)?;
-    Ok(())
-}
diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/mod.rs
deleted file mode 100644
index 838902da3..000000000
--- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/mod.rs
+++ /dev/null
@@ -1,120 +0,0 @@
-use crate::command_prelude::*;
-
-pub fn builtin() -> Vec<App> {
-    vec![
-        bench::cli(),
-        build::cli(),
-        check::cli(),
-        clean::cli(),
-        config::cli(),
-        doc::cli(),
-        fetch::cli(),
-        fix::cli(),
-        generate_lockfile::cli(),
-        git_checkout::cli(),
-        init::cli(),
-        install::cli(),
-        locate_project::cli(),
-        login::cli(),
-        logout::cli(),
-        metadata::cli(),
-        new::cli(),
-        owner::cli(),
-        package::cli(),
-        pkgid::cli(),
-        publish::cli(),
-        read_manifest::cli(),
-        report::cli(),
-        run::cli(),
-        rustc::cli(),
-        rustdoc::cli(),
-        search::cli(),
-        test::cli(),
-        tree::cli(),
-        uninstall::cli(),
-        update::cli(),
-        vendor::cli(),
-        verify_project::cli(),
-        version::cli(),
-        yank::cli(),
-    ]
-}
-
-pub fn builtin_exec(cmd: &str) -> Option<fn(&mut Config, &ArgMatches<'_>) -> CliResult> {
-    let f = match cmd {
-        "bench" => bench::exec,
-        "build" => build::exec,
-        "check" => check::exec,
-        "clean" => clean::exec,
-        "config" => config::exec,
-        "doc" => doc::exec,
-        "fetch" => fetch::exec,
-        "fix" => fix::exec,
-        "generate-lockfile" => generate_lockfile::exec,
-        "git-checkout" => git_checkout::exec,
-        "init" => init::exec,
-        "install" => install::exec,
-        "locate-project" => locate_project::exec,
-        "login" => login::exec,
-        "logout" => logout::exec,
-        "metadata" => metadata::exec,
-        "new" => new::exec,
-        "owner" => owner::exec,
-        "package" => package::exec,
-        "pkgid" => pkgid::exec,
-        "publish" => publish::exec,
-        "read-manifest" => read_manifest::exec,
-        "report" => report::exec,
-        "run" => run::exec,
-        "rustc" => rustc::exec,
-        "rustdoc" => rustdoc::exec,
-        "search" => search::exec,
-        "test" => test::exec,
-        "tree" => tree::exec,
-        "uninstall" => uninstall::exec,
-        "update" => update::exec,
-        "vendor" => vendor::exec,
-        "verify-project" => verify_project::exec,
-        "version" => version::exec,
-        "yank" => yank::exec,
-        _ => return None,
-    };
-    Some(f)
-}
-
-pub mod bench;
-pub mod build;
-pub mod check;
-pub mod clean;
-pub mod config;
-pub mod doc;
-pub mod fetch;
-pub mod fix;
-pub mod generate_lockfile;
-pub mod git_checkout;
-pub mod help;
-pub mod init;
-pub mod install;
-pub mod locate_project;
-pub mod login;
-pub mod logout;
-pub mod metadata;
-pub mod new;
-pub mod owner;
-pub mod package;
-pub mod pkgid;
-pub mod publish;
-pub mod read_manifest;
-pub mod report;
-pub mod run;
-pub mod 
rustc; -pub mod rustdoc; -pub mod search; -pub mod test; -pub mod tree; -pub mod uninstall; -pub mod update; -pub mod vendor; -pub mod verify_project; -pub mod version; -pub mod yank; diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/new.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/new.rs deleted file mode 100644 index c1828fd86..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/new.rs +++ /dev/null @@ -1,30 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops; - -pub fn cli() -> App { - subcommand("new") - .about("Create a new cargo package at ") - .arg_quiet() - .arg(Arg::with_name("path").required(true)) - .arg(opt("registry", "Registry to use").value_name("REGISTRY")) - .arg_new_opts() - .after_help("Run `cargo help new` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let opts = args.new_options(config)?; - - ops::new(&opts, config)?; - let path = args.value_of("path").unwrap(); - let package_name = if let Some(name) = args.value_of("name") { - name - } else { - path - }; - config.shell().status( - "Created", - format!("{} `{}` package", opts.kind, package_name), - )?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/owner.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/owner.rs deleted file mode 100644 index fd3d6169b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/owner.rs +++ /dev/null @@ -1,52 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops::{self, OwnersOptions}; - -pub fn cli() -> App { - subcommand("owner") - .about("Manage the owners of a crate on the registry") - .arg_quiet() - .arg(Arg::with_name("crate")) - .arg( - multi_opt( - "add", - "LOGIN", - "Name of a user or team to invite as an owner", - ) - .short("a"), - ) - .arg( - multi_opt( - "remove", - "LOGIN", - "Name of a user or team to remove as an owner", - ) - .short("r"), - ) - .arg(opt("list", "List owners of a crate").short("l")) - .arg(opt("index", "Registry index to modify owners for").value_name("INDEX")) - .arg(opt("token", "API token to use when authenticating").value_name("TOKEN")) - .arg(opt("registry", "Registry to use").value_name("REGISTRY")) - .after_help("Run `cargo help owner` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - config.load_credentials()?; - - let registry = args.registry(config)?; - let opts = OwnersOptions { - krate: args.value_of("crate").map(|s| s.to_string()), - token: args.value_of("token").map(|s| s.to_string()), - index: args.value_of("index").map(|s| s.to_string()), - to_add: args - .values_of("add") - .map(|xs| xs.map(|s| s.to_string()).collect()), - to_remove: args - .values_of("remove") - .map(|xs| xs.map(|s| s.to_string()).collect()), - list: args.is_present("list"), - registry, - }; - ops::modify_owners(config, &opts)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/package.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/package.rs deleted file mode 100644 index 875a0b0ab..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/package.rs +++ /dev/null @@ -1,61 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops::{self, PackageOpts}; - -pub fn cli() -> App { - subcommand("package") - .about("Assemble the local package into a distributable tarball") - .arg_quiet() 
- .arg( - opt( - "list", - "Print files included in a package without making one", - ) - .short("l"), - ) - .arg(opt( - "no-verify", - "Don't verify the contents by building them", - )) - .arg(opt( - "no-metadata", - "Ignore warnings about a lack of human-usable metadata", - )) - .arg(opt( - "allow-dirty", - "Allow dirty working directories to be packaged", - )) - .arg_target_triple("Build for the target triple") - .arg_target_dir() - .arg_features() - .arg_package_spec_no_all( - "Package(s) to assemble", - "Assemble all packages in the workspace", - "Don't assemble specified packages", - ) - .arg_manifest_path() - .arg_jobs() - .after_help("Run `cargo help package` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - let specs = args.packages_from_flags()?; - - ops::package( - &ws, - &PackageOpts { - config, - verify: !args.is_present("no-verify"), - list: args.is_present("list"), - check_metadata: !args.is_present("no-metadata"), - allow_dirty: args.is_present("allow-dirty"), - to_package: specs, - targets: args.targets(), - jobs: args.jobs()?, - cli_features: args.cli_features()?, - }, - )?; - - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/pkgid.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/pkgid.rs deleted file mode 100644 index 5bf7d8c22..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/pkgid.rs +++ /dev/null @@ -1,25 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops; -use cargo::util::print_available_packages; - -pub fn cli() -> App { - subcommand("pkgid") - .about("Print a fully qualified package specification") - .arg_quiet() - .arg(Arg::with_name("spec")) - .arg_package("Argument to get the package ID specifier for") - .arg_manifest_path() - .after_help("Run `cargo help pkgid` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - if args.is_present_with_zero_values("package") { - print_available_packages(&ws)? 
- } - let spec = args.value_of("spec").or_else(|| args.value_of("package")); - let spec = ops::pkgid(&ws, spec)?; - cargo::drop_println!(config, "{}", spec); - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/publish.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/publish.rs deleted file mode 100644 index 869fbccdf..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/publish.rs +++ /dev/null @@ -1,54 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops::{self, PublishOpts}; - -pub fn cli() -> App { - subcommand("publish") - .about("Upload a package to the registry") - .arg_quiet() - .arg_index() - .arg(opt("token", "Token to use when uploading").value_name("TOKEN")) - .arg(opt( - "no-verify", - "Don't verify the contents by building them", - )) - .arg(opt( - "allow-dirty", - "Allow dirty working directories to be packaged", - )) - .arg_target_triple("Build for the target triple") - .arg_target_dir() - .arg_package("Package to publish") - .arg_manifest_path() - .arg_features() - .arg_jobs() - .arg_dry_run("Perform all checks without uploading") - .arg(opt("registry", "Registry to publish to").value_name("REGISTRY")) - .after_help("Run `cargo help publish` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - config.load_credentials()?; - - let registry = args.registry(config)?; - let ws = args.workspace(config)?; - let index = args.index()?; - - ops::publish( - &ws, - &PublishOpts { - config, - token: args.value_of("token").map(|s| s.to_string()), - index, - verify: !args.is_present("no-verify"), - allow_dirty: args.is_present("allow-dirty"), - to_publish: args.packages_from_flags()?, - targets: args.targets(), - jobs: args.jobs()?, - dry_run: args.is_present("dry-run"), - registry, - cli_features: args.cli_features()?, - }, - )?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/read_manifest.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/read_manifest.rs deleted file mode 100644 index 86867152c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/read_manifest.rs +++ /dev/null @@ -1,22 +0,0 @@ -use crate::command_prelude::*; - -pub fn cli() -> App { - subcommand("read-manifest") - .about( - "\ -Print a JSON representation of a Cargo.toml manifest. 
- -Deprecated, use `cargo metadata --no-deps` instead.\ -", - ) - .arg_quiet() - .arg_manifest_path() -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - config - .shell() - .print_json(&ws.current()?.serialized(config))?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/report.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/report.rs deleted file mode 100644 index 34a79bb8f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/report.rs +++ /dev/null @@ -1,43 +0,0 @@ -use crate::command_prelude::*; -use cargo::core::compiler::future_incompat::{OnDiskReports, REPORT_PREAMBLE}; -use cargo::drop_println; - -pub fn cli() -> App { - subcommand("report") - .about("Generate and display various kinds of reports") - .after_help("Run `cargo help report` for more detailed information.\n") - .setting(clap::AppSettings::SubcommandRequiredElseHelp) - .subcommand( - subcommand("future-incompatibilities") - .alias("future-incompat") - .about("Reports any crates which will eventually stop compiling") - .arg( - opt( - "id", - "identifier of the report generated by a Cargo command invocation", - ) - .value_name("id"), - ) - .arg_package("Package to display a report for"), - ) -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - match args.subcommand() { - ("future-incompatibilities", Some(args)) => report_future_incompatibilies(config, args), - (cmd, _) => panic!("unexpected command `{}`", cmd), - } -} - -fn report_future_incompatibilies(config: &Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - let reports = OnDiskReports::load(&ws)?; - let id = args - .value_of_u32("id")? 
- .unwrap_or_else(|| reports.last_id()); - let krate = args.value_of("package"); - let report = reports.get_report(id, config, krate)?; - drop_println!(config, "{}", REPORT_PREAMBLE); - drop(config.shell().print_ansi_stdout(report.as_bytes())); - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/run.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/run.rs deleted file mode 100644 index 75f317c94..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/run.rs +++ /dev/null @@ -1,108 +0,0 @@ -use crate::command_prelude::*; -use crate::util::restricted_names::is_glob_pattern; -use cargo::core::Verbosity; -use cargo::ops::{self, CompileFilter, Packages}; -use cargo_util::ProcessError; - -pub fn cli() -> App { - subcommand("run") - // subcommand aliases are handled in aliased_command() - // .alias("r") - .setting(AppSettings::TrailingVarArg) - .about("Run a binary or example of the local package") - .arg_quiet() - .arg(Arg::with_name("args").multiple(true)) - .arg_targets_bin_example( - "Name of the bin target to run", - "Name of the example target to run", - ) - .arg_package("Package with the target to run") - .arg_jobs() - .arg_release("Build artifacts in release mode, with optimizations") - .arg_profile("Build artifacts with the specified profile") - .arg_features() - .arg_target_triple("Build for the target triple") - .arg_target_dir() - .arg_manifest_path() - .arg_message_format() - .arg_unit_graph() - .arg_ignore_rust_version() - .after_help("Run `cargo help run` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - - let mut compile_opts = args.compile_options( - config, - CompileMode::Build, - Some(&ws), - ProfileChecking::Custom, - )?; - - // Disallow `spec` to be an glob pattern - if let Packages::Packages(opt_in) = &compile_opts.spec { - if let Some(pattern) = opt_in.iter().find(|s| is_glob_pattern(s)) { - return Err(anyhow::anyhow!( - "`cargo run` does not support glob pattern `{}` on package selection", - pattern, - ) - .into()); - } - } - - if !args.is_present("example") && !args.is_present("bin") { - let default_runs: Vec<_> = compile_opts - .spec - .get_packages(&ws)? - .iter() - .filter_map(|pkg| pkg.manifest().default_run()) - .collect(); - if default_runs.len() == 1 { - compile_opts.filter = CompileFilter::from_raw_arguments( - false, - vec![default_runs[0].to_owned()], - false, - vec![], - false, - vec![], - false, - vec![], - false, - false, - ); - } else { - // ops::run will take care of errors if len pkgs != 1. - compile_opts.filter = CompileFilter::Default { - // Force this to false because the code in ops::run is not - // able to pre-check features before compilation starts to - // enforce that only 1 binary is built. - required_features_filterable: false, - }; - } - }; - - ops::run(&ws, &compile_opts, &values_os(args, "args")).map_err(|err| { - let proc_err = match err.downcast_ref::() { - Some(e) => e, - None => return CliError::new(err, 101), - }; - - // If we never actually spawned the process then that sounds pretty - // bad and we always want to forward that up. 
- let exit_code = match proc_err.code { - Some(exit) => exit, - None => return CliError::new(err, 101), - }; - - // If `-q` was passed then we suppress extra error information about - // a failed process, we assume the process itself printed out enough - // information about why it failed so we don't do so as well - let is_quiet = config.shell().verbosity() == Verbosity::Quiet; - if is_quiet { - CliError::code(exit_code) - } else { - CliError::new(err, exit_code) - } - }) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/rustc.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/rustc.rs deleted file mode 100644 index 750505dc0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/rustc.rs +++ /dev/null @@ -1,98 +0,0 @@ -use crate::command_prelude::*; -use cargo::ops; -use cargo::util::interning::InternedString; - -const PRINT_ARG_NAME: &str = "print"; -const CRATE_TYPE_ARG_NAME: &str = "crate-type"; - -pub fn cli() -> App { - subcommand("rustc") - .setting(AppSettings::TrailingVarArg) - .about("Compile a package, and pass extra options to the compiler") - .arg_quiet() - .arg(Arg::with_name("args").multiple(true).help("Rustc flags")) - .arg_package("Package to build") - .arg_jobs() - .arg_targets_all( - "Build only this package's library", - "Build only the specified binary", - "Build all binaries", - "Build only the specified example", - "Build all examples", - "Build only the specified test target", - "Build all tests", - "Build only the specified bench target", - "Build all benches", - "Build all targets", - ) - .arg_release("Build artifacts in release mode, with optimizations") - .arg_profile("Build artifacts with the specified profile") - .arg_features() - .arg_target_triple("Target triple which compiles will be for") - .arg( - opt( - PRINT_ARG_NAME, - "Output compiler information without compiling", - ) - .value_name("INFO"), - ) - .arg(multi_opt( - CRATE_TYPE_ARG_NAME, - "CRATE-TYPE", - "Comma separated list of types of crates for the compiler to emit (unstable)", - )) - .arg_target_dir() - .arg_manifest_path() - .arg_message_format() - .arg_unit_graph() - .arg_ignore_rust_version() - .arg_future_incompat_report() - .after_help("Run `cargo help rustc` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - // This is a legacy behavior that changes the behavior based on the profile. - // If we want to support this more formally, I think adding a --mode flag - // would be warranted. 
- let mode = match args.value_of("profile") { - Some("test") => CompileMode::Test, - Some("bench") => CompileMode::Bench, - Some("check") => CompileMode::Check { test: false }, - _ => CompileMode::Build, - }; - let mut compile_opts = args.compile_options_for_single_package( - config, - mode, - Some(&ws), - ProfileChecking::LegacyRustc, - )?; - if compile_opts.build_config.requested_profile == "check" { - compile_opts.build_config.requested_profile = InternedString::new("dev"); - } - let target_args = values(args, "args"); - compile_opts.target_rustc_args = if target_args.is_empty() { - None - } else { - Some(target_args) - }; - if let Some(opt_value) = args.value_of(PRINT_ARG_NAME) { - config - .cli_unstable() - .fail_if_stable_opt(PRINT_ARG_NAME, 9357)?; - ops::print(&ws, &compile_opts, opt_value)?; - return Ok(()); - } - let crate_types = values(args, CRATE_TYPE_ARG_NAME); - compile_opts.target_rustc_crate_types = if crate_types.is_empty() { - None - } else { - config - .cli_unstable() - .fail_if_stable_opt(CRATE_TYPE_ARG_NAME, 10083)?; - Some(crate_types) - }; - ops::compile(&ws, &compile_opts)?; - - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/rustdoc.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/rustdoc.rs deleted file mode 100644 index a6a32440c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/rustdoc.rs +++ /dev/null @@ -1,61 +0,0 @@ -use cargo::ops::{self, DocOptions}; - -use crate::command_prelude::*; - -pub fn cli() -> App { - subcommand("rustdoc") - .setting(AppSettings::TrailingVarArg) - .about("Build a package's documentation, using specified custom flags.") - .arg_quiet() - .arg(Arg::with_name("args").multiple(true)) - .arg(opt( - "open", - "Opens the docs in a browser after the operation", - )) - .arg_package("Package to document") - .arg_jobs() - .arg_targets_all( - "Build only this package's library", - "Build only the specified binary", - "Build all binaries", - "Build only the specified example", - "Build all examples", - "Build only the specified test target", - "Build all tests", - "Build only the specified bench target", - "Build all benches", - "Build all targets", - ) - .arg_release("Build artifacts in release mode, with optimizations") - .arg_profile("Build artifacts with the specified profile") - .arg_features() - .arg_target_triple("Build for the target triple") - .arg_target_dir() - .arg_manifest_path() - .arg_message_format() - .arg_unit_graph() - .arg_ignore_rust_version() - .after_help("Run `cargo help rustdoc` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - let mut compile_opts = args.compile_options_for_single_package( - config, - CompileMode::Doc { deps: false }, - Some(&ws), - ProfileChecking::Custom, - )?; - let target_args = values(args, "args"); - compile_opts.target_rustdoc_args = if target_args.is_empty() { - None - } else { - Some(target_args) - }; - let doc_opts = DocOptions { - open_result: args.is_present("open"), - compile_opts, - }; - ops::doc(&ws, &doc_opts)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/search.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/search.rs deleted file mode 100644 index f3f1d0467..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/search.rs +++ /dev/null @@ -1,33 +0,0 @@ -use crate::command_prelude::*; - -use 
std::cmp::min; - -use cargo::ops; - -pub fn cli() -> App { - subcommand("search") - .about("Search packages in crates.io") - .arg_quiet() - .arg(Arg::with_name("query").multiple(true)) - .arg_index() - .arg( - opt( - "limit", - "Limit the number of results (default: 10, max: 100)", - ) - .value_name("LIMIT"), - ) - .arg(opt("registry", "Registry to use").value_name("REGISTRY")) - .after_help("Run `cargo help search` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let registry = args.registry(config)?; - let index = args.index()?; - let limit = args.value_of_u32("limit")?; - let limit = min(100, limit.unwrap_or(10)); - let query: Vec<&str> = args.values_of("query").unwrap_or_default().collect(); - let query: String = query.join("+"); - ops::search(&query, config, index, limit, registry)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/test.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/test.rs deleted file mode 100644 index d03ed99d2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/test.rs +++ /dev/null @@ -1,140 +0,0 @@ -use crate::command_prelude::*; -use anyhow::Error; -use cargo::ops::{self, CompileFilter, FilterRule, LibRule}; - -pub fn cli() -> App { - subcommand("test") - // Subcommand aliases are handled in `aliased_command()`. - // .alias("t") - .setting(AppSettings::TrailingVarArg) - .about("Execute all unit and integration tests and build examples of a local package") - .arg( - Arg::with_name("TESTNAME") - .help("If specified, only run tests containing this string in their names"), - ) - .arg( - Arg::with_name("args") - .help("Arguments for the test binary") - .multiple(true) - .last(true), - ) - .arg( - opt( - "quiet", - "Display one character per test instead of one line", - ) - .short("q"), - ) - .arg_targets_all( - "Test only this package's library unit tests", - "Test only the specified binary", - "Test all binaries", - "Test only the specified example", - "Test all examples", - "Test only the specified test target", - "Test all tests", - "Test only the specified bench target", - "Test all benches", - "Test all targets", - ) - .arg(opt("doc", "Test only this library's documentation")) - .arg(opt("no-run", "Compile, but don't run tests")) - .arg(opt("no-fail-fast", "Run all tests regardless of failure")) - .arg_package_spec( - "Package to run tests for", - "Test all packages in the workspace", - "Exclude packages from the test", - ) - .arg_jobs() - .arg_release("Build artifacts in release mode, with optimizations") - .arg_profile("Build artifacts with the specified profile") - .arg_features() - .arg_target_triple("Build for the target triple") - .arg_target_dir() - .arg_manifest_path() - .arg_ignore_rust_version() - .arg_message_format() - .arg_unit_graph() - .arg_future_incompat_report() - .after_help( - "Run `cargo help test` for more detailed information.\n\ - Run `cargo test -- --help` for test binary options.\n", - ) -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - - let mut compile_opts = args.compile_options( - config, - CompileMode::Test, - Some(&ws), - ProfileChecking::Custom, - )?; - - compile_opts.build_config.requested_profile = - args.get_profile_name(config, "test", ProfileChecking::Custom)?; - - // `TESTNAME` is actually an argument of the test binary, but it's - // important, so we explicitly mention it and reconfigure. 
- let test_name: Option<&str> = args.value_of("TESTNAME"); - let test_args = args.value_of("TESTNAME").into_iter(); - let test_args = test_args.chain(args.values_of("args").unwrap_or_default()); - let test_args = test_args.collect::>(); - - let no_run = args.is_present("no-run"); - let doc = args.is_present("doc"); - if doc { - if let CompileFilter::Only { .. } = compile_opts.filter { - return Err(CliError::new( - anyhow::format_err!("Can't mix --doc with other target selecting options"), - 101, - )); - } - if no_run { - return Err(CliError::new( - anyhow::format_err!("Can't skip running doc tests with --no-run"), - 101, - )); - } - compile_opts.build_config.mode = CompileMode::Doctest; - compile_opts.filter = ops::CompileFilter::new( - LibRule::True, - FilterRule::none(), - FilterRule::none(), - FilterRule::none(), - FilterRule::none(), - ); - } else if test_name.is_some() { - if let CompileFilter::Default { .. } = compile_opts.filter { - compile_opts.filter = ops::CompileFilter::new( - LibRule::Default, // compile the library, so the unit tests can be run filtered - FilterRule::All, // compile the binaries, so the unit tests in binaries can be run filtered - FilterRule::All, // compile the tests, so the integration tests can be run filtered - FilterRule::none(), // specify --examples to unit test binaries filtered - FilterRule::none(), // specify --benches to unit test benchmarks filtered - ); // also, specify --doc to run doc tests filtered - } - } - - let ops = ops::TestOptions { - no_run, - no_fail_fast: args.is_present("no-fail-fast"), - compile_opts, - }; - - let err = ops::run_tests(&ws, &ops, &test_args)?; - match err { - None => Ok(()), - Some(err) => { - let context = anyhow::format_err!("{}", err.hint(&ws, &ops.compile_opts)); - let e = match err.code { - // Don't show "process didn't exit successfully" for simple errors. - Some(i) if cargo_util::is_simple_exit_code(i) => CliError::new(context, i), - Some(i) => CliError::new(Error::from(err).context(context), i), - None => CliError::new(Error::from(err).context(context), 101), - }; - Err(e) - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/tree.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/tree.rs deleted file mode 100644 index f45e21ba6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/tree.rs +++ /dev/null @@ -1,322 +0,0 @@ -use crate::cli; -use crate::command_prelude::*; -use anyhow::{bail, format_err}; -use cargo::core::dependency::DepKind; -use cargo::ops::tree::{self, EdgeKind}; -use cargo::ops::Packages; -use cargo::util::print_available_packages; -use cargo::util::CargoResult; -use std::collections::HashSet; -use std::str::FromStr; - -pub fn cli() -> App { - subcommand("tree") - .about("Display a tree visualization of a dependency graph") - .arg_quiet() - .arg_manifest_path() - .arg_package_spec_no_all( - "Package to be used as the root of the tree", - "Display the tree for all packages in the workspace", - "Exclude specific workspace members", - ) - // Deprecated, use --no-dedupe instead. - .arg(Arg::with_name("all").long("all").short("a").hidden(true)) - // Deprecated, use --target=all instead. - .arg( - Arg::with_name("all-targets") - .long("all-targets") - .hidden(true), - ) - .arg_features() - .arg_target_triple( - "Filter dependencies matching the given target-triple (default host platform). \ - Pass `all` to include all targets.", - ) - // Deprecated, use -e=no-dev instead. 
- .arg( - Arg::with_name("no-dev-dependencies") - .long("no-dev-dependencies") - .hidden(true), - ) - .arg( - multi_opt( - "edges", - "KINDS", - "The kinds of dependencies to display \ - (features, normal, build, dev, all, \ - no-normal, no-build, no-dev, no-proc-macro)", - ) - .short("e"), - ) - .arg( - optional_multi_opt( - "invert", - "SPEC", - "Invert the tree direction and focus on the given package", - ) - .short("i"), - ) - .arg(multi_opt( - "prune", - "SPEC", - "Prune the given package from the display of the dependency tree", - )) - .arg(opt("depth", "Maximum display depth of the dependency tree").value_name("DEPTH")) - // Deprecated, use --prefix=none instead. - .arg(Arg::with_name("no-indent").long("no-indent").hidden(true)) - // Deprecated, use --prefix=depth instead. - .arg( - Arg::with_name("prefix-depth") - .long("prefix-depth") - .hidden(true), - ) - .arg( - opt( - "prefix", - "Change the prefix (indentation) of how each entry is displayed", - ) - .value_name("PREFIX") - .possible_values(&["depth", "indent", "none"]) - .default_value("indent"), - ) - .arg(opt( - "no-dedupe", - "Do not de-duplicate (repeats all shared dependencies)", - )) - .arg( - opt( - "duplicates", - "Show only dependencies which come in multiple versions (implies -i)", - ) - .short("d") - .alias("duplicate"), - ) - .arg( - opt("charset", "Character set to use in output: utf8, ascii") - .value_name("CHARSET") - .possible_values(&["utf8", "ascii"]) - .default_value("utf8"), - ) - .arg( - opt("format", "Format string used for printing dependencies") - .value_name("FORMAT") - .short("f") - .default_value("{p}"), - ) - .arg( - // Backwards compatibility with old cargo-tree. - Arg::with_name("version") - .long("version") - .short("V") - .hidden(true), - ) - .after_help("Run `cargo help tree` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - if args.is_present("version") { - let verbose = args.occurrences_of("verbose") > 0; - let version = cli::get_version_string(verbose); - cargo::drop_print!(config, "{}", version); - return Ok(()); - } - let prefix = if args.is_present("no-indent") { - config - .shell() - .warn("the --no-indent flag has been changed to --prefix=none")?; - "none" - } else if args.is_present("prefix-depth") { - config - .shell() - .warn("the --prefix-depth flag has been changed to --prefix=depth")?; - "depth" - } else { - args.value_of("prefix").unwrap() - }; - let prefix = tree::Prefix::from_str(prefix).map_err(|e| anyhow::anyhow!("{}", e))?; - - let no_dedupe = args.is_present("no-dedupe") || args.is_present("all"); - if args.is_present("all") { - config.shell().warn( - "The `cargo tree` --all flag has been changed to --no-dedupe, \ - and may be removed in a future version.\n\ - If you are looking to display all workspace members, use the --workspace flag.", - )?; - } - - let targets = if args.is_present("all-targets") { - config - .shell() - .warn("the --all-targets flag has been changed to --target=all")?; - vec!["all".to_string()] - } else { - args._values_of("target") - }; - let target = tree::Target::from_cli(targets); - - let (edge_kinds, no_proc_macro) = parse_edge_kinds(config, args)?; - let graph_features = edge_kinds.contains(&EdgeKind::Feature); - - let pkgs_to_prune = args._values_of("prune"); - - let packages = args.packages_from_flags()?; - let mut invert = args - .values_of("invert") - .map_or_else(|| Vec::new(), |is| is.map(|s| s.to_string()).collect()); - if args.is_present_with_zero_values("invert") { - match 
&packages { - Packages::Packages(ps) => { - // Backwards compatibility with old syntax of `cargo tree -i -p foo`. - invert.extend(ps.clone()); - } - _ => { - return Err(format_err!( - "The `-i` flag requires a package name.\n\ -\n\ -The `-i` flag is used to inspect the reverse dependencies of a specific\n\ -package. It will invert the tree and display the packages that depend on the\n\ -given package.\n\ -\n\ -Note that in a workspace, by default it will only display the package's\n\ -reverse dependencies inside the tree of the workspace member in the current\n\ -directory. The --workspace flag can be used to extend it so that it will show\n\ -the package's reverse dependencies across the entire workspace. The -p flag\n\ -can be used to display the package's reverse dependencies only with the\n\ -subtree of the package given to -p.\n\ -" - ) - .into()); - } - } - } - - let ws = args.workspace(config)?; - - if args.is_present_with_zero_values("package") { - print_available_packages(&ws)?; - } - - let charset = tree::Charset::from_str(args.value_of("charset").unwrap()) - .map_err(|e| anyhow::anyhow!("{}", e))?; - let opts = tree::TreeOptions { - cli_features: args.cli_features()?, - packages, - target, - edge_kinds, - invert, - pkgs_to_prune, - prefix, - no_dedupe, - duplicates: args.is_present("duplicates"), - charset, - format: args.value_of("format").unwrap().to_string(), - graph_features, - max_display_depth: args.value_of_u32("depth")?.unwrap_or(u32::MAX), - no_proc_macro, - }; - - if opts.graph_features && opts.duplicates { - return Err(format_err!("the `-e features` flag does not support `--duplicates`").into()); - } - - tree::build_and_print(&ws, &opts)?; - Ok(()) -} - -/// Parses `--edges` option. -/// -/// Returns a tuple of `EdgeKind` map and `no_proc_marco` flag. 
-fn parse_edge_kinds( - config: &Config, - args: &ArgMatches<'_>, -) -> CargoResult<(HashSet, bool)> { - let (kinds, no_proc_macro) = { - let mut no_proc_macro = false; - let mut kinds = args.values_of("edges").map_or_else( - || Vec::new(), - |es| { - es.flat_map(|e| e.split(',')) - .filter(|e| { - no_proc_macro = *e == "no-proc-macro"; - !no_proc_macro - }) - .collect() - }, - ); - - if args.is_present("no-dev-dependencies") { - config - .shell() - .warn("the --no-dev-dependencies flag has changed to -e=no-dev")?; - kinds.push("no-dev"); - } - - if kinds.is_empty() { - kinds.extend(&["normal", "build", "dev"]); - } - - (kinds, no_proc_macro) - }; - - let mut result = HashSet::new(); - let insert_defaults = |result: &mut HashSet| { - result.insert(EdgeKind::Dep(DepKind::Normal)); - result.insert(EdgeKind::Dep(DepKind::Build)); - result.insert(EdgeKind::Dep(DepKind::Development)); - }; - let unknown = |k| { - bail!( - "unknown edge kind `{}`, valid values are \ - \"normal\", \"build\", \"dev\", \ - \"no-normal\", \"no-build\", \"no-dev\", \"no-proc-macro\", \ - \"features\", or \"all\"", - k - ) - }; - if kinds.iter().any(|k| k.starts_with("no-")) { - insert_defaults(&mut result); - for kind in &kinds { - match *kind { - "no-normal" => result.remove(&EdgeKind::Dep(DepKind::Normal)), - "no-build" => result.remove(&EdgeKind::Dep(DepKind::Build)), - "no-dev" => result.remove(&EdgeKind::Dep(DepKind::Development)), - "features" => result.insert(EdgeKind::Feature), - "normal" | "build" | "dev" | "all" => { - bail!( - "`{}` dependency kind cannot be mixed with \ - \"no-normal\", \"no-build\", or \"no-dev\" \ - dependency kinds", - kind - ) - } - k => return unknown(k), - }; - } - return Ok((result, no_proc_macro)); - } - for kind in &kinds { - match *kind { - "all" => { - insert_defaults(&mut result); - result.insert(EdgeKind::Feature); - } - "features" => { - result.insert(EdgeKind::Feature); - } - "normal" => { - result.insert(EdgeKind::Dep(DepKind::Normal)); - } - "build" => { - result.insert(EdgeKind::Dep(DepKind::Build)); - } - "dev" => { - result.insert(EdgeKind::Dep(DepKind::Development)); - } - k => return unknown(k), - } - } - if kinds.len() == 1 && kinds[0] == "features" { - insert_defaults(&mut result); - } - Ok((result, no_proc_macro)) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/uninstall.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/uninstall.rs deleted file mode 100644 index f228af195..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/uninstall.rs +++ /dev/null @@ -1,33 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops; - -pub fn cli() -> App { - subcommand("uninstall") - .about("Remove a Rust binary") - .arg_quiet() - .arg(Arg::with_name("spec").multiple(true)) - .arg_package_spec_simple("Package to uninstall") - .arg(multi_opt("bin", "NAME", "Only uninstall the binary NAME")) - .arg(opt("root", "Directory to uninstall packages from").value_name("DIR")) - .after_help("Run `cargo help uninstall` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let root = args.value_of("root"); - - if args.is_present_with_zero_values("package") { - return Err(anyhow::anyhow!( - "\"--package \" requires a SPEC format value.\n\ - Run `cargo help pkgid` for more information about SPEC format." 
- ) - .into()); - } - - let specs = args - .values_of("spec") - .unwrap_or_else(|| args.values_of("package").unwrap_or_default()) - .collect(); - ops::uninstall(root, specs, &values(args, "bin"), config)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/update.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/update.rs deleted file mode 100644 index c1041310b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/update.rs +++ /dev/null @@ -1,45 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops::{self, UpdateOptions}; -use cargo::util::print_available_packages; - -pub fn cli() -> App { - subcommand("update") - .about("Update dependencies as recorded in the local lock file") - .arg_quiet() - .arg(opt("workspace", "Only update the workspace packages").short("w")) - .arg_package_spec_simple("Package to update") - .arg(opt( - "aggressive", - "Force updating all dependencies of SPEC as well when used with -p", - )) - .arg_dry_run("Don't actually write the lockfile") - .arg( - opt( - "precise", - "Update a single dependency to exactly PRECISE when used with -p", - ) - .value_name("PRECISE"), - ) - .arg_manifest_path() - .after_help("Run `cargo help update` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let ws = args.workspace(config)?; - - if args.is_present_with_zero_values("package") { - print_available_packages(&ws)?; - } - - let update_opts = UpdateOptions { - aggressive: args.is_present("aggressive"), - precise: args.value_of("precise"), - to_update: values(args, "package"), - dry_run: args.is_present("dry-run"), - workspace: args.is_present("workspace"), - config, - }; - ops::update_lockfile(&ws, &update_opts)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/vendor.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/vendor.rs deleted file mode 100644 index 9a96af613..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/vendor.rs +++ /dev/null @@ -1,118 +0,0 @@ -use crate::command_prelude::*; -use cargo::ops; -use std::path::PathBuf; - -pub fn cli() -> App { - subcommand("vendor") - .about("Vendor all dependencies for a project locally") - .arg_quiet() - .arg_manifest_path() - .arg(Arg::with_name("path").help("Where to vendor crates (`vendor` by default)")) - .arg( - Arg::with_name("no-delete") - .long("no-delete") - .help("Don't delete older crates in the vendor directory"), - ) - .arg( - Arg::with_name("tomls") - .short("s") - .long("sync") - .help("Additional `Cargo.toml` to sync and vendor") - .value_name("TOML") - .multiple(true), - ) - .arg( - Arg::with_name("respect-source-config") - .long("respect-source-config") - .help("Respect `[source]` config in `.cargo/config`") - .multiple(true), - ) - .arg( - Arg::with_name("versioned-dirs") - .long("versioned-dirs") - .help("Always include version in subdir name"), - ) - // Not supported. - .arg( - Arg::with_name("no-merge-sources") - .long("no-merge-sources") - .hidden(true), - ) - // Not supported. - .arg( - Arg::with_name("relative-path") - .long("relative-path") - .hidden(true), - ) - // Not supported. - .arg( - Arg::with_name("only-git-deps") - .long("only-git-deps") - .hidden(true), - ) - // Not supported. 
- .arg( - Arg::with_name("disallow-duplicates") - .long("disallow-duplicates") - .hidden(true), - ) - .after_help("Run `cargo help vendor` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - // We're doing the vendoring operation ourselves, so we don't actually want - // to respect any of the `source` configuration in Cargo itself. That's - // intended for other consumers of Cargo, but we want to go straight to the - // source, e.g. crates.io, to fetch crates. - if !args.is_present("respect-source-config") { - config.values_mut()?.remove("source"); - } - - // When we moved `cargo vendor` into Cargo itself we didn't stabilize a few - // flags, so try to provide a helpful error message in that case to ensure - // that users currently using the flag aren't tripped up. - let crates_io_cargo_vendor_flag = if args.is_present("no-merge-sources") { - Some("--no-merge-sources") - } else if args.is_present("relative-path") { - Some("--relative-path") - } else if args.is_present("only-git-deps") { - Some("--only-git-deps") - } else if args.is_present("disallow-duplicates") { - Some("--disallow-duplicates") - } else { - None - }; - if let Some(flag) = crates_io_cargo_vendor_flag { - return Err(anyhow::format_err!( - "\ -the crates.io `cargo vendor` command has now been merged into Cargo itself -and does not support the flag `{}` currently; to continue using the flag you -can execute `cargo-vendor vendor ...`, and if you would like to see this flag -supported in Cargo itself please feel free to file an issue at -https://github.com/rust-lang/cargo/issues/new -", - flag - ) - .into()); - } - - let ws = args.workspace(config)?; - let path = args - .value_of_os("path") - .map(|val| PathBuf::from(val.to_os_string())) - .unwrap_or_else(|| PathBuf::from("vendor")); - ops::vendor( - &ws, - &ops::VendorOptions { - no_delete: args.is_present("no-delete"), - destination: &path, - versioned_dirs: args.is_present("versioned-dirs"), - extra: args - .values_of_os("tomls") - .unwrap_or_default() - .map(|s| PathBuf::from(s.to_os_string())) - .collect(), - }, - )?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/verify_project.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/verify_project.rs deleted file mode 100644 index 4a5cf68c1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/verify_project.rs +++ /dev/null @@ -1,26 +0,0 @@ -use crate::command_prelude::*; - -use std::collections::HashMap; -use std::process; - -pub fn cli() -> App { - subcommand("verify-project") - .about("Check correctness of crate manifest") - .arg_quiet() - .arg_manifest_path() - .after_help("Run `cargo help verify-project` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - if let Err(e) = args.workspace(config) { - let mut h = HashMap::new(); - h.insert("invalid".to_string(), e.to_string()); - config.shell().print_json(&h)?; - process::exit(1) - } - - let mut h = HashMap::new(); - h.insert("success".to_string(), "true".to_string()); - config.shell().print_json(&h)?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/version.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/version.rs deleted file mode 100644 index 851887789..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/version.rs +++ /dev/null @@ -1,16 +0,0 @@ -use crate::cli; -use 
crate::command_prelude::*; - -pub fn cli() -> App { - subcommand("version") - .about("Show version information") - .arg_quiet() - .after_help("Run `cargo help version` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - let verbose = args.occurrences_of("verbose") > 0; - let version = cli::get_version_string(verbose); - cargo::drop_print!(config, "{}", version); - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/yank.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/yank.rs deleted file mode 100644 index 9bf3fa02c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/commands/yank.rs +++ /dev/null @@ -1,40 +0,0 @@ -use crate::command_prelude::*; - -use cargo::ops; - -pub fn cli() -> App { - subcommand("yank") - .about("Remove a pushed crate from the index") - .arg_quiet() - .arg(Arg::with_name("crate")) - .arg( - opt("vers", "The version to yank or un-yank") - .value_name("VERSION") - .required(true), - ) - .arg(opt( - "undo", - "Undo a yank, putting a version back into the index", - )) - .arg(opt("index", "Registry index to yank from").value_name("INDEX")) - .arg(opt("token", "API token to use when authenticating").value_name("TOKEN")) - .arg(opt("registry", "Registry to use").value_name("REGISTRY")) - .after_help("Run `cargo help yank` for more detailed information.\n") -} - -pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { - config.load_credentials()?; - - let registry = args.registry(config)?; - - ops::yank( - config, - args.value_of("crate").map(|s| s.to_string()), - args.value_of("vers").map(|s| s.to_string()), - args.value_of("token").map(|s| s.to_string()), - args.value_of("index").map(|s| s.to_string()), - args.is_present("undo"), - registry, - )?; - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/main.rs b/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/main.rs deleted file mode 100644 index 57895b766..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/bin/cargo/main.rs +++ /dev/null @@ -1,236 +0,0 @@ -#![warn(rust_2018_idioms)] // while we're getting used to 2018 -#![allow(clippy::all)] -#![warn(clippy::needless_borrow)] -#![warn(clippy::redundant_clone)] - -use cargo::core::shell::Shell; -use cargo::util::toml::StringOrVec; -use cargo::util::CliError; -use cargo::util::{self, closest_msg, command_prelude, CargoResult, CliResult, Config}; -use cargo_util::{ProcessBuilder, ProcessError}; -use std::collections::BTreeMap; -use std::env; -use std::fs; -use std::path::{Path, PathBuf}; - -mod cli; -mod commands; - -use crate::command_prelude::*; - -fn main() { - #[cfg(feature = "pretty-env-logger")] - pretty_env_logger::init_custom_env("CARGO_LOG"); - #[cfg(not(feature = "pretty-env-logger"))] - env_logger::init_from_env("CARGO_LOG"); - - let mut config = match Config::default() { - Ok(cfg) => cfg, - Err(e) => { - let mut shell = Shell::new(); - cargo::exit_with_error(e.into(), &mut shell) - } - }; - - let result = match cargo::ops::fix_maybe_exec_rustc(&config) { - Ok(true) => Ok(()), - Ok(false) => { - let _token = cargo::util::job::setup(); - cli::main(&mut config) - } - Err(e) => Err(CliError::from(e)), - }; - - match result { - Err(e) => cargo::exit_with_error(e, &mut *config.shell()), - Ok(()) => {} - } -} - -/// Table for defining the aliases which come builtin in `Cargo`. -/// The contents are structured as: `(alias, aliased_command, description)`. 
-const BUILTIN_ALIASES: [(&str, &str, &str); 5] = [
-    ("b", "build", "alias: build"),
-    ("c", "check", "alias: check"),
-    ("d", "doc", "alias: doc"),
-    ("r", "run", "alias: run"),
-    ("t", "test", "alias: test"),
-];
-
-/// Function which contains the list of all of the builtin aliases and it's
-/// corresponding execs represented as &str.
-fn builtin_aliases_execs(cmd: &str) -> Option<&(&str, &str, &str)> {
-    BUILTIN_ALIASES.iter().find(|alias| alias.0 == cmd)
-}
-
-fn aliased_command(config: &Config, command: &str) -> CargoResult<Option<Vec<String>>> {
-    let alias_name = format!("alias.{}", command);
-    let user_alias = match config.get_string(&alias_name) {
-        Ok(Some(record)) => Some(
-            record
-                .val
-                .split_whitespace()
-                .map(|s| s.to_string())
-                .collect(),
-        ),
-        Ok(None) => None,
-        Err(_) => config.get::<Option<Vec<String>>>(&alias_name)?,
-    };
-
-    let result = user_alias.or_else(|| {
-        builtin_aliases_execs(command).map(|command_str| vec![command_str.1.to_string()])
-    });
-    Ok(result)
-}
-
-/// List all runnable commands
-fn list_commands(config: &Config) -> BTreeMap<String, CommandInfo> {
-    let prefix = "cargo-";
-    let suffix = env::consts::EXE_SUFFIX;
-    let mut commands = BTreeMap::new();
-    for dir in search_directories(config) {
-        let entries = match fs::read_dir(dir) {
-            Ok(entries) => entries,
-            _ => continue,
-        };
-        for entry in entries.filter_map(|e| e.ok()) {
-            let path = entry.path();
-            let filename = match path.file_name().and_then(|s| s.to_str()) {
-                Some(filename) => filename,
-                _ => continue,
-            };
-            if !filename.starts_with(prefix) || !filename.ends_with(suffix) {
-                continue;
-            }
-            if is_executable(entry.path()) {
-                let end = filename.len() - suffix.len();
-                commands.insert(
-                    filename[prefix.len()..end].to_string(),
-                    CommandInfo::External { path: path.clone() },
-                );
-            }
-        }
-    }
-
-    for cmd in commands::builtin() {
-        commands.insert(
-            cmd.get_name().to_string(),
-            CommandInfo::BuiltIn {
-                about: cmd.p.meta.about.map(|s| s.to_string()),
-            },
-        );
-    }
-
-    // Add the builtin_aliases and them descriptions to the
-    // `commands` `BTreeMap`.
-    for command in &BUILTIN_ALIASES {
-        commands.insert(
-            command.0.to_string(),
-            CommandInfo::BuiltIn {
-                about: Some(command.2.to_string()),
-            },
-        );
-    }
-
-    // Add the user-defined aliases
-    if let Ok(aliases) = config.get::<BTreeMap<String, StringOrVec>>("alias") {
-        for (name, target) in aliases.iter() {
-            commands.insert(
-                name.to_string(),
-                CommandInfo::Alias {
-                    target: target.clone(),
-                },
-            );
-        }
-    }
-
-    commands
-}
-
-fn find_external_subcommand(config: &Config, cmd: &str) -> Option<PathBuf> {
-    let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX);
-    search_directories(config)
-        .iter()
-        .map(|dir| dir.join(&command_exe))
-        .find(|file| is_executable(file))
-}
-
-fn execute_external_subcommand(config: &Config, cmd: &str, args: &[&str]) -> CliResult {
-    let path = find_external_subcommand(config, cmd);
-    let command = match path {
-        Some(command) => command,
-        None => {
-            let suggestions = list_commands(config);
-            let did_you_mean = closest_msg(cmd, suggestions.keys(), |c| c);
-            let err = anyhow::format_err!("no such subcommand: `{}`{}", cmd, did_you_mean);
-            return Err(CliError::new(err, 101));
-        }
-    };
-
-    let cargo_exe = config.cargo_exe()?;
-    let err = match ProcessBuilder::new(&command)
-        .env(cargo::CARGO_ENV, cargo_exe)
-        .args(args)
-        .exec_replace()
-    {
-        Ok(()) => return Ok(()),
-        Err(e) => e,
-    };
-
-    if let Some(perr) = err.downcast_ref::<ProcessError>() {
-        if let Some(code) = perr.code {
-            return Err(CliError::code(code));
-        }
-    }
-    Err(CliError::new(err, 101))
-}
-
-#[cfg(unix)]
-fn is_executable<P: AsRef<Path>>(path: P) -> bool {
-    use std::os::unix::prelude::*;
-    fs::metadata(path)
-        .map(|metadata| metadata.is_file() && metadata.permissions().mode() & 0o111 != 0)
-        .unwrap_or(false)
-}
-#[cfg(windows)]
-fn is_executable<P: AsRef<Path>>(path: P) -> bool {
-    path.as_ref().is_file()
-}
-
-fn search_directories(config: &Config) -> Vec<PathBuf> {
-    let mut dirs = vec![config.home().clone().into_path_unlocked().join("bin")];
-    if let Some(val) = env::var_os("PATH") {
-        dirs.extend(env::split_paths(&val));
-    }
-    dirs
-}
-
-fn init_git_transports(config: &Config) {
-    // Only use a custom transport if any HTTP options are specified,
-    // such as proxies or custom certificate authorities. The custom
-    // transport, however, is not as well battle-tested.
-
-    match cargo::ops::needs_custom_http_transport(config) {
-        Ok(true) => {}
-        _ => return,
-    }
-
-    let handle = match cargo::ops::http_handle(config) {
-        Ok(handle) => handle,
-        Err(..) => return,
-    };
-
-    // The unsafety of the registration function derives from two aspects:
-    //
-    // 1. This call must be synchronized with all other registration calls as
-    //    well as construction of new transports.
-    // 2. The argument is leaked.
-    //
-    // We're clear on point (1) because this is only called at the start of this
-    // binary (we know what the state of the world looks like) and we're mostly
-    // clear on point (2) because we'd only free it after everything is done
-    // anyway
-    unsafe {
-        git2_curl::register(handle);
-    }
-}
diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_config.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_config.rs
deleted file mode 100644
index 3770bb68c..000000000
--- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_config.rs
+++ /dev/null
@@ -1,233 +0,0 @@
-use crate::core::compiler::CompileKind;
-use crate::util::interning::InternedString;
-use crate::util::{CargoResult, Config, RustfixDiagnosticServer};
-use anyhow::bail;
-use cargo_util::ProcessBuilder;
-use serde::ser;
-use std::cell::RefCell;
-use std::path::PathBuf;
-
-/// Configuration information for a rustc build.
-#[derive(Debug)]
-pub struct BuildConfig {
-    /// The requested kind of compilation for this session
-    pub requested_kinds: Vec<CompileKind>,
-    /// Number of rustc jobs to run in parallel.
-    pub jobs: u32,
-    /// Build profile
-    pub requested_profile: InternedString,
-    /// The mode we are compiling in.
-    pub mode: CompileMode,
-    /// `true` to print stdout in JSON format (for machine reading).
-    pub message_format: MessageFormat,
-    /// Force Cargo to do a full rebuild and treat each target as changed.
-    pub force_rebuild: bool,
-    /// Output a build plan to stdout instead of actually compiling.
-    pub build_plan: bool,
-    /// Output the unit graph to stdout instead of actually compiling.
-    pub unit_graph: bool,
-    /// An optional override of the rustc process for primary units
-    pub primary_unit_rustc: Option<ProcessBuilder>,
-    /// A thread used by `cargo fix` to receive messages on a socket regarding
-    /// the success/failure of applying fixes.
-    pub rustfix_diagnostic_server: RefCell<Option<RustfixDiagnosticServer>>,
-    /// The directory to copy final artifacts to. Note that even if `out_dir` is
-    /// set, a copy of artifacts still could be found a `target/(debug\release)`
-    /// as usual.
-    // Note that, although the cmd-line flag name is `out-dir`, in code we use
-    // `export_dir`, to avoid confusion with out dir at `target/debug/deps`.
-    pub export_dir: Option<PathBuf>,
-    /// `true` to output a future incompatibility report at the end of the build
-    pub future_incompat_report: bool,
-}
-
-impl BuildConfig {
-    /// Parses all config files to learn about build configuration.
Currently - /// configured options are: - /// - /// * `build.jobs` - /// * `build.target` - /// * `target.$target.ar` - /// * `target.$target.linker` - /// * `target.$target.libfoo.metadata` - pub fn new( - config: &Config, - jobs: Option, - requested_targets: &[String], - mode: CompileMode, - ) -> CargoResult { - let cfg = config.build_config()?; - let requested_kinds = CompileKind::from_requested_targets(config, requested_targets)?; - if jobs == Some(0) { - anyhow::bail!("jobs must be at least 1") - } - if jobs.is_some() && config.jobserver_from_env().is_some() { - config.shell().warn( - "a `-j` argument was passed to Cargo but Cargo is \ - also configured with an external jobserver in \ - its environment, ignoring the `-j` parameter", - )?; - } - let jobs = jobs.or(cfg.jobs).unwrap_or(::num_cpus::get() as u32); - if jobs == 0 { - anyhow::bail!("jobs may not be 0"); - } - - Ok(BuildConfig { - requested_kinds, - jobs, - requested_profile: InternedString::new("dev"), - mode, - message_format: MessageFormat::Human, - force_rebuild: false, - build_plan: false, - unit_graph: false, - primary_unit_rustc: None, - rustfix_diagnostic_server: RefCell::new(None), - export_dir: None, - future_incompat_report: false, - }) - } - - /// Whether or not the *user* wants JSON output. Whether or not rustc - /// actually uses JSON is decided in `add_error_format`. - pub fn emit_json(&self) -> bool { - matches!(self.message_format, MessageFormat::Json { .. }) - } - - pub fn test(&self) -> bool { - self.mode == CompileMode::Test || self.mode == CompileMode::Bench - } - - pub fn single_requested_kind(&self) -> CargoResult { - match self.requested_kinds.len() { - 1 => Ok(self.requested_kinds[0]), - _ => bail!("only one `--target` argument is supported"), - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub enum MessageFormat { - Human, - Json { - /// Whether rustc diagnostics are rendered by cargo or included into the - /// output stream. - render_diagnostics: bool, - /// Whether the `rendered` field of rustc diagnostics are using the - /// "short" rendering. - short: bool, - /// Whether the `rendered` field of rustc diagnostics embed ansi color - /// codes. - ansi: bool, - }, - Short, -} - -/// The general "mode" for what to do. -/// This is used for two purposes. The commands themselves pass this in to -/// `compile_ws` to tell it the general execution strategy. This influences -/// the default targets selected. The other use is in the `Unit` struct -/// to indicate what is being done with a specific target. -#[derive(Clone, Copy, PartialEq, Debug, Eq, Hash, PartialOrd, Ord)] -pub enum CompileMode { - /// A target being built for a test. - Test, - /// Building a target with `rustc` (lib or bin). - Build, - /// Building a target with `rustc` to emit `rmeta` metadata only. If - /// `test` is true, then it is also compiled with `--test` to check it like - /// a test. - Check { test: bool }, - /// Used to indicate benchmarks should be built. This is not used in - /// `Unit`, because it is essentially the same as `Test` (indicating - /// `--test` should be passed to rustc) and by using `Test` instead it - /// allows some de-duping of Units to occur. - Bench, - /// A target that will be documented with `rustdoc`. - /// If `deps` is true, then it will also document all dependencies. - Doc { deps: bool }, - /// A target that will be tested with `rustdoc`. - Doctest, - /// An example or library that will be scraped for function calls by `rustdoc`. 
- Docscrape, - /// A marker for Units that represent the execution of a `build.rs` script. - RunCustomBuild, -} - -impl ser::Serialize for CompileMode { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - use self::CompileMode::*; - match *self { - Test => "test".serialize(s), - Build => "build".serialize(s), - Check { .. } => "check".serialize(s), - Bench => "bench".serialize(s), - Doc { .. } => "doc".serialize(s), - Doctest => "doctest".serialize(s), - Docscrape => "docscrape".serialize(s), - RunCustomBuild => "run-custom-build".serialize(s), - } - } -} - -impl CompileMode { - /// Returns `true` if the unit is being checked. - pub fn is_check(self) -> bool { - matches!(self, CompileMode::Check { .. }) - } - - /// Returns `true` if this is generating documentation. - pub fn is_doc(self) -> bool { - matches!(self, CompileMode::Doc { .. }) - } - - /// Returns `true` if this a doc test. - pub fn is_doc_test(self) -> bool { - self == CompileMode::Doctest - } - - /// Returns `true` if this is scraping examples for documentation. - pub fn is_doc_scrape(self) -> bool { - self == CompileMode::Docscrape - } - - /// Returns `true` if this is any type of test (test, benchmark, doc test, or - /// check test). - pub fn is_any_test(self) -> bool { - matches!( - self, - CompileMode::Test - | CompileMode::Bench - | CompileMode::Check { test: true } - | CompileMode::Doctest - ) - } - - /// Returns `true` if this is something that passes `--test` to rustc. - pub fn is_rustc_test(self) -> bool { - matches!( - self, - CompileMode::Test | CompileMode::Bench | CompileMode::Check { test: true } - ) - } - - /// Returns `true` if this is the *execution* of a `build.rs` script. - pub fn is_run_custom_build(self) -> bool { - self == CompileMode::RunCustomBuild - } - - /// Returns `true` if this mode may generate an executable. - /// - /// Note that this also returns `true` for building libraries, so you also - /// have to check the target. - pub fn generates_executable(self) -> bool { - matches!( - self, - CompileMode::Test | CompileMode::Bench | CompileMode::Build - ) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_context/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_context/mod.rs deleted file mode 100644 index 9141314a9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_context/mod.rs +++ /dev/null @@ -1,139 +0,0 @@ -use crate::core::compiler::unit_graph::UnitGraph; -use crate::core::compiler::{BuildConfig, CompileKind, Unit}; -use crate::core::profiles::Profiles; -use crate::core::PackageSet; -use crate::core::Workspace; -use crate::util::config::Config; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::Rustc; -use std::collections::{HashMap, HashSet}; -use std::path::PathBuf; - -mod target_info; -pub use self::target_info::{ - FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, -}; - -/// The build context, containing all information about a build task. -/// -/// It is intended that this is mostly static information. Stuff that mutates -/// during the build can be found in the parent `Context`. (I say mostly, -/// because this has internal caching, but nothing that should be observable -/// or require &mut.) -pub struct BuildContext<'a, 'cfg> { - /// The workspace the build is for. - pub ws: &'a Workspace<'cfg>, - - /// The cargo configuration. 
- pub config: &'cfg Config, - pub profiles: Profiles, - pub build_config: &'a BuildConfig, - - /// Extra compiler args for either `rustc` or `rustdoc`. - pub extra_compiler_args: HashMap>, - - // Crate types for `rustc`. - pub target_rustc_crate_types: HashMap>, - - /// Package downloader. - /// - /// This holds ownership of the `Package` objects. - pub packages: PackageSet<'cfg>, - - /// Information about rustc and the target platform. - pub target_data: RustcTargetData<'cfg>, - - /// The root units of `unit_graph` (units requested on the command-line). - pub roots: Vec, - - /// The dependency graph of units to compile. - pub unit_graph: UnitGraph, - - /// Reverse-dependencies of documented units, used by the rustdoc --scrape-examples flag. - pub scrape_units: Vec, - - /// The list of all kinds that are involved in this build - pub all_kinds: HashSet, -} - -impl<'a, 'cfg> BuildContext<'a, 'cfg> { - pub fn new( - ws: &'a Workspace<'cfg>, - packages: PackageSet<'cfg>, - build_config: &'a BuildConfig, - profiles: Profiles, - extra_compiler_args: HashMap>, - target_rustc_crate_types: HashMap>, - target_data: RustcTargetData<'cfg>, - roots: Vec, - unit_graph: UnitGraph, - scrape_units: Vec, - ) -> CargoResult> { - let all_kinds = unit_graph - .keys() - .map(|u| u.kind) - .chain(build_config.requested_kinds.iter().copied()) - .chain(std::iter::once(CompileKind::Host)) - .collect(); - - Ok(BuildContext { - ws, - config: ws.config(), - packages, - build_config, - profiles, - extra_compiler_args, - target_rustc_crate_types, - target_data, - roots, - unit_graph, - scrape_units, - all_kinds, - }) - } - - pub fn rustc(&self) -> &Rustc { - &self.target_data.rustc - } - - /// Gets the user-specified linker for a particular host or target. - pub fn linker(&self, kind: CompileKind) -> Option { - self.target_data - .target_config(kind) - .linker - .as_ref() - .map(|l| l.val.clone().resolve_program(self.config)) - } - - /// Gets the host architecture triple. - /// - /// For example, x86_64-unknown-linux-gnu, would be - /// - machine: x86_64, - /// - hardware-platform: unknown, - /// - operating system: linux-gnu. - pub fn host_triple(&self) -> InternedString { - self.target_data.rustc.host - } - - /// Gets the number of jobs specified for this build. 
- pub fn jobs(&self) -> u32 { - self.build_config.jobs - } - - pub fn rustflags_args(&self, unit: &Unit) -> &[String] { - &self.target_data.info(unit.kind).rustflags - } - - pub fn rustdocflags_args(&self, unit: &Unit) -> &[String] { - &self.target_data.info(unit.kind).rustdocflags - } - - pub fn extra_args_for(&self, unit: &Unit) -> Option<&Vec> { - self.extra_compiler_args.get(unit) - } - - pub fn rustc_crate_types_args_for(&self, unit: &Unit) -> Option<&Vec> { - self.target_rustc_crate_types.get(unit) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_context/target_info.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_context/target_info.rs deleted file mode 100644 index 505910d5f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_context/target_info.rs +++ /dev/null @@ -1,928 +0,0 @@ -use crate::core::compiler::{ - BuildOutput, CompileKind, CompileMode, CompileTarget, Context, CrateType, -}; -use crate::core::{Dependency, Target, TargetKind, Workspace}; -use crate::util::config::{Config, StringList, TargetConfig}; -use crate::util::{CargoResult, Rustc}; -use anyhow::Context as _; -use cargo_platform::{Cfg, CfgExpr}; -use cargo_util::{paths, ProcessBuilder}; -use serde::{Deserialize, Serialize}; -use std::cell::RefCell; -use std::collections::hash_map::{Entry, HashMap}; -use std::env; -use std::path::{Path, PathBuf}; -use std::str::{self, FromStr}; - -/// Information about the platform target gleaned from querying rustc. -/// -/// `RustcTargetData` keeps two of these, one for the host and one for the -/// target. If no target is specified, it uses a clone from the host. -#[derive(Clone)] -pub struct TargetInfo { - /// A base process builder for discovering crate type information. In - /// particular, this is used to determine the output filename prefix and - /// suffix for a crate type. - crate_type_process: ProcessBuilder, - /// Cache of output filename prefixes and suffixes. - /// - /// The key is the crate type name (like `cdylib`) and the value is - /// `Some((prefix, suffix))`, for example `libcargo.so` would be - /// `Some(("lib", ".so")). The value is `None` if the crate type is not - /// supported. - crate_types: RefCell>>, - /// `cfg` information extracted from `rustc --print=cfg`. - cfg: Vec, - /// Path to the sysroot. - pub sysroot: PathBuf, - /// Path to the "lib" or "bin" directory that rustc uses for its dynamic - /// libraries. - pub sysroot_host_libdir: PathBuf, - /// Path to the "lib" directory in the sysroot which rustc uses for linking - /// target libraries. - pub sysroot_target_libdir: PathBuf, - /// Extra flags to pass to `rustc`, see `env_args`. - pub rustflags: Vec, - /// Extra flags to pass to `rustdoc`, see `env_args`. - pub rustdocflags: Vec, - /// Whether or not rustc supports the `-Csplit-debuginfo` flag. - pub supports_split_debuginfo: bool, - /// Whether or not rustc supports the `--json future-incompat` flag. - pub supports_json_future_incompat: bool, -} - -/// Kind of each file generated by a Unit, part of `FileType`. -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum FileFlavor { - /// Not a special file type. - Normal, - /// Like `Normal`, but not directly executable. - /// For example, a `.wasm` file paired with the "normal" `.js` file. - Auxiliary, - /// Something you can link against (e.g., a library). - Linkable, - /// An `.rmeta` Rust metadata file. - Rmeta, - /// Piece of external debug information (e.g., `.dSYM`/`.pdb` file). 
- DebugInfo, -} - -/// Type of each file generated by a Unit. -#[derive(Debug)] -pub struct FileType { - /// The kind of file. - pub flavor: FileFlavor, - /// The crate-type that generates this file. - /// - /// `None` for things that aren't associated with a specific crate type, - /// for example `rmeta` files. - pub crate_type: Option, - /// The suffix for the file (for example, `.rlib`). - /// This is an empty string for executables on Unix-like platforms. - suffix: String, - /// The prefix for the file (for example, `lib`). - /// This is an empty string for things like executables. - prefix: String, - /// Flag to convert hyphen to underscore when uplifting. - should_replace_hyphens: bool, -} - -impl FileType { - /// The filename for this FileType crated by rustc. - pub fn output_filename(&self, target: &Target, metadata: Option<&str>) -> String { - match metadata { - Some(metadata) => format!( - "{}{}-{}{}", - self.prefix, - target.crate_name(), - metadata, - self.suffix - ), - None => format!("{}{}{}", self.prefix, target.crate_name(), self.suffix), - } - } - - /// The filename for this FileType that Cargo should use when "uplifting" - /// it to the destination directory. - pub fn uplift_filename(&self, target: &Target) -> String { - let name = match target.binary_filename() { - Some(name) => name, - None => { - // For binary crate type, `should_replace_hyphens` will always be false. - if self.should_replace_hyphens { - target.crate_name() - } else { - target.name().to_string() - } - } - }; - - format!("{}{}{}", self.prefix, name, self.suffix) - } - - /// Creates a new instance representing a `.rmeta` file. - pub fn new_rmeta() -> FileType { - // Note that even binaries use the `lib` prefix. - FileType { - flavor: FileFlavor::Rmeta, - crate_type: None, - suffix: ".rmeta".to_string(), - prefix: "lib".to_string(), - should_replace_hyphens: true, - } - } -} - -impl TargetInfo { - pub fn new( - config: &Config, - requested_kinds: &[CompileKind], - rustc: &Rustc, - kind: CompileKind, - ) -> CargoResult { - let rustflags = env_args( - config, - requested_kinds, - &rustc.host, - None, - kind, - "RUSTFLAGS", - )?; - let extra_fingerprint = kind.fingerprint_hash(); - let mut process = rustc.workspace_process(); - process - .arg("-") - .arg("--crate-name") - .arg("___") - .arg("--print=file-names") - .args(&rustflags) - .env_remove("RUSTC_LOG"); - - if let CompileKind::Target(target) = kind { - process.arg("--target").arg(target.rustc_target()); - } - - let crate_type_process = process.clone(); - const KNOWN_CRATE_TYPES: &[CrateType] = &[ - CrateType::Bin, - CrateType::Rlib, - CrateType::Dylib, - CrateType::Cdylib, - CrateType::Staticlib, - CrateType::ProcMacro, - ]; - for crate_type in KNOWN_CRATE_TYPES.iter() { - process.arg("--crate-type").arg(crate_type.as_str()); - } - let supports_split_debuginfo = rustc - .cached_output( - process.clone().arg("-Csplit-debuginfo=packed"), - extra_fingerprint, - ) - .is_ok(); - - let supports_json_future_incompat = rustc - .cached_output( - process - .clone() - .args(&["--error-format", "json", "--json", "future-incompat"]), - extra_fingerprint, - ) - .is_ok(); - - process.arg("--print=sysroot"); - process.arg("--print=cfg"); - - let (output, error) = rustc - .cached_output(&process, extra_fingerprint) - .with_context(|| "failed to run `rustc` to learn about target-specific information")?; - - let mut lines = output.lines(); - let mut map = HashMap::new(); - for crate_type in KNOWN_CRATE_TYPES { - let out = parse_crate_type(crate_type, &process, &output, 
&error, &mut lines)?; - map.insert(crate_type.clone(), out); - } - - let line = match lines.next() { - Some(line) => line, - None => anyhow::bail!( - "output of --print=sysroot missing when learning about \ - target-specific information from rustc\n{}", - output_err_info(&process, &output, &error) - ), - }; - let sysroot = PathBuf::from(line); - let sysroot_host_libdir = if cfg!(windows) { - sysroot.join("bin") - } else { - sysroot.join("lib") - }; - let mut sysroot_target_libdir = sysroot.clone(); - sysroot_target_libdir.push("lib"); - sysroot_target_libdir.push("rustlib"); - sysroot_target_libdir.push(match &kind { - CompileKind::Host => rustc.host.as_str(), - CompileKind::Target(target) => target.short_name(), - }); - sysroot_target_libdir.push("lib"); - - let cfg = lines - .map(|line| Ok(Cfg::from_str(line)?)) - .filter(TargetInfo::not_user_specific_cfg) - .collect::>>() - .with_context(|| { - format!( - "failed to parse the cfg from `rustc --print=cfg`, got:\n{}", - output - ) - })?; - - Ok(TargetInfo { - crate_type_process, - crate_types: RefCell::new(map), - sysroot, - sysroot_host_libdir, - sysroot_target_libdir, - // recalculate `rustflags` from above now that we have `cfg` - // information - rustflags: env_args( - config, - requested_kinds, - &rustc.host, - Some(&cfg), - kind, - "RUSTFLAGS", - )?, - rustdocflags: env_args( - config, - requested_kinds, - &rustc.host, - Some(&cfg), - kind, - "RUSTDOCFLAGS", - )?, - cfg, - supports_split_debuginfo, - supports_json_future_incompat, - }) - } - - fn not_user_specific_cfg(cfg: &CargoResult) -> bool { - if let Ok(Cfg::Name(cfg_name)) = cfg { - // This should also include "debug_assertions", but it causes - // regressions. Maybe some day in the distant future it can be - // added (and possibly change the warning to an error). - if cfg_name == "proc_macro" { - return false; - } - } - true - } - - /// All the target `cfg` settings. - pub fn cfg(&self) -> &[Cfg] { - &self.cfg - } - - /// Returns the list of file types generated by the given crate type. - /// - /// Returns `None` if the target does not support the given crate type. - fn file_types( - &self, - crate_type: &CrateType, - flavor: FileFlavor, - target_triple: &str, - ) -> CargoResult>> { - let crate_type = if *crate_type == CrateType::Lib { - CrateType::Rlib - } else { - crate_type.clone() - }; - - let mut crate_types = self.crate_types.borrow_mut(); - let entry = crate_types.entry(crate_type.clone()); - let crate_type_info = match entry { - Entry::Occupied(o) => &*o.into_mut(), - Entry::Vacant(v) => { - let value = self.discover_crate_type(v.key())?; - &*v.insert(value) - } - }; - let (prefix, suffix) = match *crate_type_info { - Some((ref prefix, ref suffix)) => (prefix, suffix), - None => return Ok(None), - }; - let mut ret = vec![FileType { - suffix: suffix.clone(), - prefix: prefix.clone(), - flavor, - crate_type: Some(crate_type.clone()), - should_replace_hyphens: crate_type != CrateType::Bin, - }]; - - // Window shared library import/export files. - if crate_type.is_dynamic() { - // Note: Custom JSON specs can alter the suffix. For now, we'll - // just ignore non-DLL suffixes. - if target_triple.ends_with("-windows-msvc") && suffix == ".dll" { - // See https://docs.microsoft.com/en-us/cpp/build/reference/working-with-import-libraries-and-export-files - // for more information about DLL import/export files. 
- ret.push(FileType { - suffix: ".dll.lib".to_string(), - prefix: prefix.clone(), - flavor: FileFlavor::Auxiliary, - crate_type: Some(crate_type.clone()), - should_replace_hyphens: true, - }); - // NOTE: lld does not produce these - ret.push(FileType { - suffix: ".dll.exp".to_string(), - prefix: prefix.clone(), - flavor: FileFlavor::Auxiliary, - crate_type: Some(crate_type.clone()), - should_replace_hyphens: true, - }); - } else if target_triple.ends_with("windows-gnu") && suffix == ".dll" { - // See https://cygwin.com/cygwin-ug-net/dll.html for more - // information about GNU import libraries. - // LD can link DLL directly, but LLD requires the import library. - ret.push(FileType { - suffix: ".dll.a".to_string(), - prefix: "lib".to_string(), - flavor: FileFlavor::Auxiliary, - crate_type: Some(crate_type.clone()), - should_replace_hyphens: true, - }) - } - } - - if target_triple.starts_with("wasm32-") && crate_type == CrateType::Bin && suffix == ".js" { - // emscripten binaries generate a .js file, which loads a .wasm - // file. - ret.push(FileType { - suffix: ".wasm".to_string(), - prefix: prefix.clone(), - flavor: FileFlavor::Auxiliary, - crate_type: Some(crate_type.clone()), - // Name `foo-bar` will generate a `foo_bar.js` and - // `foo_bar.wasm`. Cargo will translate the underscore and - // copy `foo_bar.js` to `foo-bar.js`. However, the wasm - // filename is embedded in the .js file with an underscore, so - // it should not contain hyphens. - should_replace_hyphens: true, - }); - // And a map file for debugging. This is only emitted with debug=2 - // (-g4 for emcc). - ret.push(FileType { - suffix: ".wasm.map".to_string(), - prefix: prefix.clone(), - flavor: FileFlavor::DebugInfo, - crate_type: Some(crate_type.clone()), - should_replace_hyphens: true, - }); - } - - // Handle separate debug files. - let is_apple = target_triple.contains("-apple-"); - if matches!( - crate_type, - CrateType::Bin | CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro - ) { - if is_apple { - let suffix = if crate_type == CrateType::Bin { - ".dSYM".to_string() - } else { - ".dylib.dSYM".to_string() - }; - ret.push(FileType { - suffix, - prefix: prefix.clone(), - flavor: FileFlavor::DebugInfo, - crate_type: Some(crate_type), - // macOS tools like lldb use all sorts of magic to locate - // dSYM files. See https://lldb.llvm.org/use/symbols.html - // for some details. It seems like a `.dSYM` located next - // to the executable with the same name is one method. The - // dSYM should have the same hyphens as the executable for - // the names to match. - should_replace_hyphens: false, - }) - } else if target_triple.ends_with("-msvc") { - ret.push(FileType { - suffix: ".pdb".to_string(), - prefix: prefix.clone(), - flavor: FileFlavor::DebugInfo, - crate_type: Some(crate_type), - // The absolute path to the pdb file is embedded in the - // executable. If the exe/pdb pair is moved to another - // machine, then debuggers will look in the same directory - // of the exe with the original pdb filename. Since the - // original name contains underscores, they need to be - // preserved. 
- should_replace_hyphens: true, - }) - } - } - - Ok(Some(ret)) - } - - fn discover_crate_type(&self, crate_type: &CrateType) -> CargoResult> { - let mut process = self.crate_type_process.clone(); - - process.arg("--crate-type").arg(crate_type.as_str()); - - let output = process.exec_with_output().with_context(|| { - format!( - "failed to run `rustc` to learn about crate-type {} information", - crate_type - ) - })?; - - let error = str::from_utf8(&output.stderr).unwrap(); - let output = str::from_utf8(&output.stdout).unwrap(); - parse_crate_type(crate_type, &process, output, error, &mut output.lines()) - } - - /// Returns all the file types generated by rustc for the given mode/target_kind. - /// - /// The first value is a Vec of file types generated, the second value is - /// a list of CrateTypes that are not supported by the given target. - pub fn rustc_outputs( - &self, - mode: CompileMode, - target_kind: &TargetKind, - target_triple: &str, - ) -> CargoResult<(Vec, Vec)> { - match mode { - CompileMode::Build => self.calc_rustc_outputs(target_kind, target_triple), - CompileMode::Test | CompileMode::Bench => { - match self.file_types(&CrateType::Bin, FileFlavor::Normal, target_triple)? { - Some(fts) => Ok((fts, Vec::new())), - None => Ok((Vec::new(), vec![CrateType::Bin])), - } - } - CompileMode::Check { .. } => Ok((vec![FileType::new_rmeta()], Vec::new())), - CompileMode::Doc { .. } - | CompileMode::Doctest - | CompileMode::Docscrape - | CompileMode::RunCustomBuild => { - panic!("asked for rustc output for non-rustc mode") - } - } - } - - fn calc_rustc_outputs( - &self, - target_kind: &TargetKind, - target_triple: &str, - ) -> CargoResult<(Vec, Vec)> { - let mut unsupported = Vec::new(); - let mut result = Vec::new(); - let crate_types = target_kind.rustc_crate_types(); - for crate_type in &crate_types { - let flavor = if crate_type.is_linkable() { - FileFlavor::Linkable - } else { - FileFlavor::Normal - }; - let file_types = self.file_types(crate_type, flavor, target_triple)?; - match file_types { - Some(types) => { - result.extend(types); - } - None => { - unsupported.push(crate_type.clone()); - } - } - } - if !result.is_empty() && !crate_types.iter().any(|ct| ct.requires_upstream_objects()) { - // Only add rmeta if pipelining. - result.push(FileType::new_rmeta()); - } - Ok((result, unsupported)) - } -} - -/// Takes rustc output (using specialized command line args), and calculates the file prefix and -/// suffix for the given crate type, or returns `None` if the type is not supported. (e.g., for a -/// Rust library like `libcargo.rlib`, we have prefix "lib" and suffix "rlib"). -/// -/// The caller needs to ensure that the lines object is at the correct line for the given crate -/// type: this is not checked. -/// -/// This function can not handle more than one file per type (with wasm32-unknown-emscripten, there -/// are two files for bin (`.wasm` and `.js`)). 
-fn parse_crate_type( - crate_type: &CrateType, - cmd: &ProcessBuilder, - output: &str, - error: &str, - lines: &mut str::Lines<'_>, -) -> CargoResult> { - let not_supported = error.lines().any(|line| { - (line.contains("unsupported crate type") || line.contains("unknown crate type")) - && line.contains(&format!("crate type `{}`", crate_type)) - }); - if not_supported { - return Ok(None); - } - let line = match lines.next() { - Some(line) => line, - None => anyhow::bail!( - "malformed output when learning about crate-type {} information\n{}", - crate_type, - output_err_info(cmd, output, error) - ), - }; - let mut parts = line.trim().split("___"); - let prefix = parts.next().unwrap(); - let suffix = match parts.next() { - Some(part) => part, - None => anyhow::bail!( - "output of --print=file-names has changed in the compiler, cannot parse\n{}", - output_err_info(cmd, output, error) - ), - }; - - Ok(Some((prefix.to_string(), suffix.to_string()))) -} - -/// Helper for creating an error message when parsing rustc output fails. -fn output_err_info(cmd: &ProcessBuilder, stdout: &str, stderr: &str) -> String { - let mut result = format!("command was: {}\n", cmd); - if !stdout.is_empty() { - result.push_str("\n--- stdout\n"); - result.push_str(stdout); - } - if !stderr.is_empty() { - result.push_str("\n--- stderr\n"); - result.push_str(stderr); - } - if stdout.is_empty() && stderr.is_empty() { - result.push_str("(no output received)"); - } - result -} - -/// Acquire extra flags to pass to the compiler from various locations. -/// -/// The locations are: -/// -/// - the `CARGO_ENCODED_RUSTFLAGS` environment variable -/// - the `RUSTFLAGS` environment variable -/// -/// then if this was not found -/// -/// - `target.*.rustflags` from the config (.cargo/config) -/// - `target.cfg(..).rustflags` from the config -/// -/// then if neither of these were found -/// -/// - `build.rustflags` from the config -/// -/// Note that if a `target` is specified, no args will be passed to host code (plugins, build -/// scripts, ...), even if it is the same as the target. -fn env_args( - config: &Config, - requested_kinds: &[CompileKind], - host_triple: &str, - target_cfg: Option<&[Cfg]>, - kind: CompileKind, - name: &str, -) -> CargoResult> { - // We *want* to apply RUSTFLAGS only to builds for the - // requested target architecture, and not to things like build - // scripts and plugins, which may be for an entirely different - // architecture. Cargo's present architecture makes it quite - // hard to only apply flags to things that are not build - // scripts and plugins though, so we do something more hacky - // instead to avoid applying the same RUSTFLAGS to multiple targets - // arches: - // - // 1) If --target is not specified we just apply RUSTFLAGS to - // all builds; they are all going to have the same target. - // - // 2) If --target *is* specified then we only apply RUSTFLAGS - // to compilation units with the Target kind, which indicates - // it was chosen by the --target flag. - // - // This means that, e.g., even if the specified --target is the - // same as the host, build scripts in plugins won't get - // RUSTFLAGS. - if requested_kinds != [CompileKind::Host] && kind.is_host() { - // This is probably a build script or plugin and we're - // compiling with --target. In this scenario there are - // no rustflags we can apply. - return Ok(Vec::new()); - } - - // First try CARGO_ENCODED_RUSTFLAGS from the environment. - // Prefer this over RUSTFLAGS since it's less prone to encoding errors. 
- if let Ok(a) = env::var(format!("CARGO_ENCODED_{}", name)) { - if a.is_empty() { - return Ok(Vec::new()); - } - return Ok(a.split('\x1f').map(str::to_string).collect()); - } - - // Then try RUSTFLAGS from the environment - if let Ok(a) = env::var(name) { - let args = a - .split(' ') - .map(str::trim) - .filter(|s| !s.is_empty()) - .map(str::to_string); - return Ok(args.collect()); - } - - let mut rustflags = Vec::new(); - - let name = name - .chars() - .flat_map(|c| c.to_lowercase()) - .collect::(); - // Then the target.*.rustflags value... - let target = match &kind { - CompileKind::Host => host_triple, - CompileKind::Target(target) => target.short_name(), - }; - let key = format!("target.{}.{}", target, name); - if let Some(args) = config.get::>(&key)? { - rustflags.extend(args.as_slice().iter().cloned()); - } - // ...including target.'cfg(...)'.rustflags - if let Some(target_cfg) = target_cfg { - config - .target_cfgs()? - .iter() - .filter_map(|(key, cfg)| { - cfg.rustflags - .as_ref() - .map(|rustflags| (key, &rustflags.val)) - }) - .filter(|(key, _rustflags)| CfgExpr::matches_key(key, target_cfg)) - .for_each(|(_key, cfg_rustflags)| { - rustflags.extend(cfg_rustflags.as_slice().iter().cloned()); - }); - } - - if !rustflags.is_empty() { - return Ok(rustflags); - } - - // Then the `build.rustflags` value. - let build = config.build_config()?; - let list = if name == "rustflags" { - &build.rustflags - } else { - &build.rustdocflags - }; - if let Some(list) = list { - return Ok(list.as_slice().to_vec()); - } - - Ok(Vec::new()) -} - -/// Collection of information about `rustc` and the host and target. -pub struct RustcTargetData<'cfg> { - /// Information about `rustc` itself. - pub rustc: Rustc, - - /// Config - config: &'cfg Config, - requested_kinds: Vec, - - /// Build information for the "host", which is information about when - /// `rustc` is invoked without a `--target` flag. This is used for - /// procedural macros, build scripts, etc. - host_config: TargetConfig, - host_info: TargetInfo, - - /// Build information for targets that we're building for. This will be - /// empty if the `--target` flag is not passed. - target_config: HashMap, - target_info: HashMap, -} - -impl<'cfg> RustcTargetData<'cfg> { - pub fn new( - ws: &Workspace<'cfg>, - requested_kinds: &[CompileKind], - ) -> CargoResult> { - let config = ws.config(); - let rustc = config.load_global_rustc(Some(ws))?; - let mut target_config = HashMap::new(); - let mut target_info = HashMap::new(); - let target_applies_to_host = config.target_applies_to_host()?; - let host_info = TargetInfo::new(config, requested_kinds, &rustc, CompileKind::Host)?; - let host_config = if target_applies_to_host { - config.target_cfg_triple(&rustc.host)? - } else { - config.host_cfg_triple(&rustc.host)? - }; - - // This is a hack. The unit_dependency graph builder "pretends" that - // `CompileKind::Host` is `CompileKind::Target(host)` if the - // `--target` flag is not specified. Since the unit_dependency code - // needs access to the target config data, create a copy so that it - // can be found. See `rebuild_unit_graph_shared` for why this is done. 
- if requested_kinds.iter().any(CompileKind::is_host) { - let ct = CompileTarget::new(&rustc.host)?; - target_info.insert(ct, host_info.clone()); - target_config.insert(ct, config.target_cfg_triple(&rustc.host)?); - }; - - let mut res = RustcTargetData { - rustc, - config, - requested_kinds: requested_kinds.into(), - host_config, - host_info, - target_config, - target_info, - }; - - // Get all kinds we currently know about. - // - // For now, targets can only ever come from the root workspace - // units as artifact dependencies are not a thing yet, so this - // correctly represents all the kinds that can happen. When we - // have artifact dependencies or other ways for targets to - // appear at places that are not the root units, we may have - // to revisit this. - let all_kinds = requested_kinds - .iter() - .copied() - .chain(ws.members().flat_map(|p| { - p.manifest() - .default_kind() - .into_iter() - .chain(p.manifest().forced_kind()) - })); - for kind in all_kinds { - if let CompileKind::Target(target) = kind { - if !res.target_config.contains_key(&target) { - res.target_config - .insert(target, res.config.target_cfg_triple(target.short_name())?); - } - if !res.target_info.contains_key(&target) { - res.target_info.insert( - target, - TargetInfo::new(res.config, &res.requested_kinds, &res.rustc, kind)?, - ); - } - } - } - - Ok(res) - } - - /// Returns a "short" name for the given kind, suitable for keying off - /// configuration in Cargo or presenting to users. - pub fn short_name<'a>(&'a self, kind: &'a CompileKind) -> &'a str { - match kind { - CompileKind::Host => &self.rustc.host, - CompileKind::Target(target) => target.short_name(), - } - } - - /// Whether a dependency should be compiled for the host or target platform, - /// specified by `CompileKind`. - pub fn dep_platform_activated(&self, dep: &Dependency, kind: CompileKind) -> bool { - // If this dependency is only available for certain platforms, - // make sure we're only enabling it for that platform. - let platform = match dep.platform() { - Some(p) => p, - None => return true, - }; - let name = self.short_name(&kind); - platform.matches(name, self.cfg(kind)) - } - - /// Gets the list of `cfg`s printed out from the compiler for the specified kind. - pub fn cfg(&self, kind: CompileKind) -> &[Cfg] { - self.info(kind).cfg() - } - - /// Information about the given target platform, learned by querying rustc. - pub fn info(&self, kind: CompileKind) -> &TargetInfo { - match kind { - CompileKind::Host => &self.host_info, - CompileKind::Target(s) => &self.target_info[&s], - } - } - - /// Gets the target configuration for a particular host or target. - pub fn target_config(&self, kind: CompileKind) -> &TargetConfig { - match kind { - CompileKind::Host => &self.host_config, - CompileKind::Target(s) => &self.target_config[&s], - } - } - - /// If a build script is overridden, this returns the `BuildOutput` to use. - /// - /// `lib_name` is the `links` library name and `kind` is whether it is for - /// Host or Target. 
- pub fn script_override(&self, lib_name: &str, kind: CompileKind) -> Option<&BuildOutput> { - self.target_config(kind).links_overrides.get(lib_name) - } -} - -/// Structure used to deal with Rustdoc fingerprinting -#[derive(Debug, Serialize, Deserialize)] -pub struct RustDocFingerprint { - pub rustc_vv: String, -} - -impl RustDocFingerprint { - /// This function checks whether the latest version of `Rustc` used to compile this - /// `Workspace`'s docs was the same as the one is currently being used in this `cargo doc` - /// call. - /// - /// In case it's not, it takes care of removing the `doc/` folder as well as overwriting - /// the rustdoc fingerprint info in order to guarantee that we won't end up with mixed - /// versions of the `js/html/css` files that `rustdoc` autogenerates which do not have - /// any versioning. - pub fn check_rustdoc_fingerprint(cx: &Context<'_, '_>) -> CargoResult<()> { - if cx.bcx.config.cli_unstable().skip_rustdoc_fingerprint { - return Ok(()); - } - let actual_rustdoc_target_data = RustDocFingerprint { - rustc_vv: cx.bcx.rustc().verbose_version.clone(), - }; - - let fingerprint_path = cx.files().host_root().join(".rustdoc_fingerprint.json"); - let write_fingerprint = || -> CargoResult<()> { - paths::write( - &fingerprint_path, - serde_json::to_string(&actual_rustdoc_target_data)?, - ) - }; - let rustdoc_data = match paths::read(&fingerprint_path) { - Ok(rustdoc_data) => rustdoc_data, - // If the fingerprint does not exist, do not clear out the doc - // directories. Otherwise this ran into problems where projects - // like rustbuild were creating the doc directory before running - // `cargo doc` in a way that deleting it would break it. - Err(_) => return write_fingerprint(), - }; - match serde_json::from_str::(&rustdoc_data) { - Ok(fingerprint) => { - if fingerprint.rustc_vv == actual_rustdoc_target_data.rustc_vv { - return Ok(()); - } else { - log::debug!( - "doc fingerprint changed:\noriginal:\n{}\nnew:\n{}", - fingerprint.rustc_vv, - actual_rustdoc_target_data.rustc_vv - ); - } - } - Err(e) => { - log::debug!("could not deserialize {:?}: {}", fingerprint_path, e); - } - }; - // Fingerprint does not match, delete the doc directories and write a new fingerprint. - log::debug!( - "fingerprint {:?} mismatch, clearing doc directories", - fingerprint_path - ); - cx.bcx - .all_kinds - .iter() - .map(|kind| cx.files().layout(*kind).doc()) - .filter(|path| path.exists()) - .try_for_each(|path| clean_doc(path))?; - write_fingerprint()?; - return Ok(()); - - fn clean_doc(path: &Path) -> CargoResult<()> { - let entries = path - .read_dir() - .with_context(|| format!("failed to read directory `{}`", path.display()))?; - for entry in entries { - let entry = entry?; - // Don't remove hidden files. Rustdoc does not create them, - // but the user might have. - if entry - .file_name() - .to_str() - .map_or(false, |name| name.starts_with('.')) - { - continue; - } - let path = entry.path(); - if entry.file_type()?.is_dir() { - paths::remove_dir_all(path)?; - } else { - paths::remove_file(path)?; - } - } - Ok(()) - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_plan.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_plan.rs deleted file mode 100644 index 6ffe24a27..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/build_plan.rs +++ /dev/null @@ -1,163 +0,0 @@ -//! A graph-like structure used to represent the rustc commands to build the package and the -//! 
interdependencies between them. -//! -//! The BuildPlan structure is used to store the dependency graph of a dry run so that it can be -//! shared with an external build system. Each Invocation in the BuildPlan comprises a single -//! subprocess and defines the build environment, the outputs produced by the subprocess, and the -//! dependencies on other Invocations. - -use std::collections::BTreeMap; -use std::path::{Path, PathBuf}; - -use serde::Serialize; - -use super::context::OutputFile; -use super::{CompileKind, CompileMode, Context, Unit}; -use crate::core::TargetKind; -use crate::util::{internal, CargoResult, Config}; -use cargo_util::ProcessBuilder; - -#[derive(Debug, Serialize)] -struct Invocation { - package_name: String, - package_version: semver::Version, - target_kind: TargetKind, - kind: CompileKind, - compile_mode: CompileMode, - deps: Vec, - outputs: Vec, - links: BTreeMap, - program: String, - args: Vec, - env: BTreeMap, - cwd: Option, -} - -#[derive(Debug)] -pub struct BuildPlan { - invocation_map: BTreeMap, - plan: SerializedBuildPlan, -} - -#[derive(Debug, Serialize)] -struct SerializedBuildPlan { - invocations: Vec, - inputs: Vec, -} - -impl Invocation { - pub fn new(unit: &Unit, deps: Vec) -> Invocation { - let id = unit.pkg.package_id(); - Invocation { - package_name: id.name().to_string(), - package_version: id.version().clone(), - kind: unit.kind, - target_kind: unit.target.kind().clone(), - compile_mode: unit.mode, - deps, - outputs: Vec::new(), - links: BTreeMap::new(), - program: String::new(), - args: Vec::new(), - env: BTreeMap::new(), - cwd: None, - } - } - - pub fn add_output(&mut self, path: &Path, link: &Option) { - self.outputs.push(path.to_path_buf()); - if let Some(ref link) = *link { - self.links.insert(link.clone(), path.to_path_buf()); - } - } - - pub fn update_cmd(&mut self, cmd: &ProcessBuilder) -> CargoResult<()> { - self.program = cmd - .get_program() - .to_str() - .ok_or_else(|| anyhow::format_err!("unicode program string required"))? - .to_string(); - self.cwd = Some(cmd.get_cwd().unwrap().to_path_buf()); - for arg in cmd.get_args().iter() { - self.args.push( - arg.to_str() - .ok_or_else(|| anyhow::format_err!("unicode argument string required"))? - .to_string(), - ); - } - for (var, value) in cmd.get_envs() { - let value = match value { - Some(s) => s, - None => continue, - }; - self.env.insert( - var.clone(), - value - .to_str() - .ok_or_else(|| anyhow::format_err!("unicode environment value required"))? 
- .to_string(), - ); - } - Ok(()) - } -} - -impl BuildPlan { - pub fn new() -> BuildPlan { - BuildPlan { - invocation_map: BTreeMap::new(), - plan: SerializedBuildPlan::new(), - } - } - - pub fn add(&mut self, cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<()> { - let id = self.plan.invocations.len(); - self.invocation_map.insert(unit.buildkey(), id); - let deps = cx - .unit_deps(unit) - .iter() - .map(|dep| self.invocation_map[&dep.unit.buildkey()]) - .collect(); - let invocation = Invocation::new(unit, deps); - self.plan.invocations.push(invocation); - Ok(()) - } - - pub fn update( - &mut self, - invocation_name: &str, - cmd: &ProcessBuilder, - outputs: &[OutputFile], - ) -> CargoResult<()> { - let id = self.invocation_map[invocation_name]; - let invocation = - self.plan.invocations.get_mut(id).ok_or_else(|| { - internal(format!("couldn't find invocation for {}", invocation_name)) - })?; - - invocation.update_cmd(cmd)?; - for output in outputs.iter() { - invocation.add_output(&output.path, &output.hardlink); - } - - Ok(()) - } - - pub fn set_inputs(&mut self, inputs: Vec) { - self.plan.inputs = inputs; - } - - pub fn output_plan(self, config: &Config) { - let encoded = serde_json::to_string(&self.plan).unwrap(); - crate::drop_println!(config, "{}", encoded); - } -} - -impl SerializedBuildPlan { - pub fn new() -> SerializedBuildPlan { - SerializedBuildPlan { - invocations: Vec::new(), - inputs: Vec::new(), - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/compilation.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/compilation.rs deleted file mode 100644 index 3b21e4f43..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/compilation.rs +++ /dev/null @@ -1,419 +0,0 @@ -use std::collections::{BTreeSet, HashMap}; -use std::env; -use std::ffi::{OsStr, OsString}; -use std::path::PathBuf; - -use cargo_platform::CfgExpr; -use cargo_util::{paths, ProcessBuilder}; - -use super::BuildContext; -use crate::core::compiler::{CompileKind, Metadata, Unit}; -use crate::core::Package; -use crate::util::{config, CargoResult, Config}; - -/// Structure with enough information to run `rustdoc --test`. -pub struct Doctest { - /// What's being doctested - pub unit: Unit, - /// Arguments needed to pass to rustdoc to run this test. - pub args: Vec, - /// Whether or not -Zunstable-options is needed. - pub unstable_opts: bool, - /// The -Clinker value to use. - pub linker: Option, - /// The script metadata, if this unit's package has a build script. - /// - /// This is used for indexing [`Compilation::extra_env`]. - pub script_meta: Option, -} - -/// Information about the output of a unit. -#[derive(Ord, PartialOrd, Eq, PartialEq)] -pub struct UnitOutput { - /// The unit that generated this output. - pub unit: Unit, - /// Path to the unit's primary output (an executable or cdylib). - pub path: PathBuf, - /// The script metadata, if this unit's package has a build script. - /// - /// This is used for indexing [`Compilation::extra_env`]. - pub script_meta: Option, -} - -/// A structure returning the result of a compilation. -pub struct Compilation<'cfg> { - /// An array of all tests created during this compilation. - pub tests: Vec, - - /// An array of all binaries created. - pub binaries: Vec, - - /// An array of all cdylibs created. - pub cdylibs: Vec, - - /// The crate names of the root units specified on the command-line. - pub root_crate_names: Vec, - - /// All directories for the output of native build commands. 
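// Illustrative sketch (not Cargo's actual implementation): the `BuildPlan`
// above is only ever emitted as JSON (via `--build-plan`) for external build
// systems to consume. A toy version of the same idea with serde/serde_json;
// the struct shape and field names are illustrative, not Cargo's exact
// schema. Assumes `serde` (with the `derive` feature) and `serde_json` as
// dependencies.
use serde::Serialize;
use std::collections::BTreeMap;

#[derive(Serialize)]
struct Invocation {
    package_name: String,
    program: String,
    args: Vec<String>,
    env: BTreeMap<String, String>,
    deps: Vec<usize>, // indices into `invocations`
}

#[derive(Serialize)]
struct Plan {
    invocations: Vec<Invocation>,
    inputs: Vec<String>,
}

fn main() -> serde_json::Result<()> {
    let plan = Plan {
        invocations: vec![Invocation {
            package_name: "example".into(),
            program: "rustc".into(),
            args: vec!["--edition=2021".into(), "src/lib.rs".into()],
            env: BTreeMap::new(),
            deps: vec![],
        }],
        inputs: vec!["Cargo.toml".into()],
    };
    println!("{}", serde_json::to_string_pretty(&plan)?);
    Ok(())
}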
- /// - /// This is currently used to drive some entries which are added to the - /// LD_LIBRARY_PATH as appropriate. - /// - /// The order should be deterministic. - pub native_dirs: BTreeSet, - - /// Root output directory (for the local package's artifacts) - pub root_output: HashMap, - - /// Output directory for rust dependencies. - /// May be for the host or for a specific target. - pub deps_output: HashMap, - - /// The path to the host libdir for the compiler used - sysroot_host_libdir: PathBuf, - - /// The path to libstd for each target - sysroot_target_libdir: HashMap, - - /// Extra environment variables that were passed to compilations and should - /// be passed to future invocations of programs. - /// - /// The key is the build script metadata for uniquely identifying the - /// `RunCustomBuild` unit that generated these env vars. - pub extra_env: HashMap>, - - /// Libraries to test with rustdoc. - pub to_doc_test: Vec, - - /// The target host triple. - pub host: String, - - config: &'cfg Config, - - /// Rustc process to be used by default - rustc_process: ProcessBuilder, - /// Rustc process to be used for workspace crates instead of rustc_process - rustc_workspace_wrapper_process: ProcessBuilder, - /// Optional rustc process to be used for primary crates instead of either rustc_process or - /// rustc_workspace_wrapper_process - primary_rustc_process: Option, - - target_runners: HashMap)>>, -} - -impl<'cfg> Compilation<'cfg> { - pub fn new<'a>(bcx: &BuildContext<'a, 'cfg>) -> CargoResult> { - let mut rustc = bcx.rustc().process(); - let mut primary_rustc_process = bcx.build_config.primary_unit_rustc.clone(); - let mut rustc_workspace_wrapper_process = bcx.rustc().workspace_process(); - - if bcx.config.extra_verbose() { - rustc.display_env_vars(); - rustc_workspace_wrapper_process.display_env_vars(); - - if let Some(rustc) = primary_rustc_process.as_mut() { - rustc.display_env_vars(); - } - } - - Ok(Compilation { - // TODO: deprecated; remove. - native_dirs: BTreeSet::new(), - root_output: HashMap::new(), - deps_output: HashMap::new(), - sysroot_host_libdir: bcx - .target_data - .info(CompileKind::Host) - .sysroot_host_libdir - .clone(), - sysroot_target_libdir: bcx - .all_kinds - .iter() - .map(|&kind| { - ( - kind, - bcx.target_data.info(kind).sysroot_target_libdir.clone(), - ) - }) - .collect(), - tests: Vec::new(), - binaries: Vec::new(), - cdylibs: Vec::new(), - root_crate_names: Vec::new(), - extra_env: HashMap::new(), - to_doc_test: Vec::new(), - config: bcx.config, - host: bcx.host_triple().to_string(), - rustc_process: rustc, - rustc_workspace_wrapper_process, - primary_rustc_process, - target_runners: bcx - .build_config - .requested_kinds - .iter() - .chain(Some(&CompileKind::Host)) - .map(|kind| Ok((*kind, target_runner(bcx, *kind)?))) - .collect::>>()?, - }) - } - - /// Returns a [`ProcessBuilder`] for running `rustc`. - /// - /// `is_primary` is true if this is a "primary package", which means it - /// was selected by the user on the command-line (such as with a `-p` - /// flag), see [`crate::core::compiler::Context::primary_packages`]. - /// - /// `is_workspace` is true if this is a workspace member. 
- pub fn rustc_process( - &self, - unit: &Unit, - is_primary: bool, - is_workspace: bool, - ) -> CargoResult { - let rustc = if is_primary && self.primary_rustc_process.is_some() { - self.primary_rustc_process.clone().unwrap() - } else if is_workspace { - self.rustc_workspace_wrapper_process.clone() - } else { - self.rustc_process.clone() - }; - - let cmd = fill_rustc_tool_env(rustc, unit); - self.fill_env(cmd, &unit.pkg, None, unit.kind, true) - } - - /// Returns a [`ProcessBuilder`] for running `rustdoc`. - pub fn rustdoc_process( - &self, - unit: &Unit, - script_meta: Option, - ) -> CargoResult { - let rustdoc = ProcessBuilder::new(&*self.config.rustdoc()?); - let cmd = fill_rustc_tool_env(rustdoc, unit); - let mut p = self.fill_env(cmd, &unit.pkg, script_meta, unit.kind, true)?; - unit.target.edition().cmd_edition_arg(&mut p); - - for crate_type in unit.target.rustc_crate_types() { - p.arg("--crate-type").arg(crate_type.as_str()); - } - - Ok(p) - } - - /// Returns a [`ProcessBuilder`] appropriate for running a process for the - /// host platform. - /// - /// This is currently only used for running build scripts. If you use this - /// for anything else, please be extra careful on how environment - /// variables are set! - pub fn host_process>( - &self, - cmd: T, - pkg: &Package, - ) -> CargoResult { - self.fill_env( - ProcessBuilder::new(cmd), - pkg, - None, - CompileKind::Host, - false, - ) - } - - pub fn target_runner(&self, kind: CompileKind) -> Option<&(PathBuf, Vec)> { - self.target_runners.get(&kind).and_then(|x| x.as_ref()) - } - - /// Returns a [`ProcessBuilder`] appropriate for running a process for the - /// target platform. This is typically used for `cargo run` and `cargo - /// test`. - /// - /// `script_meta` is the metadata for the `RunCustomBuild` unit that this - /// unit used for its build script. Use `None` if the package did not have - /// a build script. - pub fn target_process>( - &self, - cmd: T, - kind: CompileKind, - pkg: &Package, - script_meta: Option, - ) -> CargoResult { - let builder = if let Some((runner, args)) = self.target_runner(kind) { - let mut builder = ProcessBuilder::new(runner); - builder.args(args); - builder.arg(cmd); - builder - } else { - ProcessBuilder::new(cmd) - }; - self.fill_env(builder, pkg, script_meta, kind, false) - } - - /// Prepares a new process with an appropriate environment to run against - /// the artifacts produced by the build process. - /// - /// The package argument is also used to configure environment variables as - /// well as the working directory of the child process. - fn fill_env( - &self, - mut cmd: ProcessBuilder, - pkg: &Package, - script_meta: Option, - kind: CompileKind, - is_rustc_tool: bool, - ) -> CargoResult { - let mut search_path = Vec::new(); - if is_rustc_tool { - search_path.push(self.deps_output[&CompileKind::Host].clone()); - search_path.push(self.sysroot_host_libdir.clone()); - } else { - search_path.extend(super::filter_dynamic_search_path( - self.native_dirs.iter(), - &self.root_output[&kind], - )); - search_path.push(self.deps_output[&kind].clone()); - search_path.push(self.root_output[&kind].clone()); - // For build-std, we don't want to accidentally pull in any shared - // libs from the sysroot that ships with rustc. This may not be - // required (at least I cannot craft a situation where it - // matters), but is here to be safe. 
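// Illustrative sketch (not Cargo's actual implementation): `rustc_process`
// above chooses between three prepared invocations, in this order: the
// primary-unit override (if any), the workspace-wrapper process for
// workspace members, and plain rustc for everything else. The same
// precedence with plain strings; names and values are invented.
fn choose_rustc<'a>(
    primary_override: Option<&'a str>,
    workspace_wrapper: &'a str,
    plain_rustc: &'a str,
    is_primary: bool,
    is_workspace: bool,
) -> &'a str {
    if is_primary {
        if let Some(cmd) = primary_override {
            return cmd;
        }
    }
    if is_workspace {
        return workspace_wrapper;
    }
    plain_rustc
}

fn main() {
    // Primary workspace unit with an override in place: the override wins.
    assert_eq!(
        choose_rustc(Some("my-rustc-override"), "my-wrapper", "rustc", true, true),
        "my-rustc-override"
    );
    // Non-primary workspace member: the workspace wrapper is used.
    assert_eq!(
        choose_rustc(Some("my-rustc-override"), "my-wrapper", "rustc", false, true),
        "my-wrapper"
    );
    // Everything else: plain rustc.
    assert_eq!(choose_rustc(None, "my-wrapper", "rustc", false, false), "rustc");
}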
- if self.config.cli_unstable().build_std.is_none() { - search_path.push(self.sysroot_target_libdir[&kind].clone()); - } - } - - let dylib_path = paths::dylib_path(); - let dylib_path_is_empty = dylib_path.is_empty(); - search_path.extend(dylib_path.into_iter()); - if cfg!(target_os = "macos") && dylib_path_is_empty { - // These are the defaults when DYLD_FALLBACK_LIBRARY_PATH isn't - // set or set to an empty string. Since Cargo is explicitly setting - // the value, make sure the defaults still work. - if let Some(home) = env::var_os("HOME") { - search_path.push(PathBuf::from(home).join("lib")); - } - search_path.push(PathBuf::from("/usr/local/lib")); - search_path.push(PathBuf::from("/usr/lib")); - } - let search_path = paths::join_paths(&search_path, paths::dylib_path_envvar())?; - - cmd.env(paths::dylib_path_envvar(), &search_path); - if let Some(meta) = script_meta { - if let Some(env) = self.extra_env.get(&meta) { - for (k, v) in env { - cmd.env(k, v); - } - } - } - - let metadata = pkg.manifest().metadata(); - - let cargo_exe = self.config.cargo_exe()?; - cmd.env(crate::CARGO_ENV, cargo_exe); - - // When adding new environment variables depending on - // crate properties which might require rebuild upon change - // consider adding the corresponding properties to the hash - // in BuildContext::target_metadata() - cmd.env("CARGO_MANIFEST_DIR", pkg.root()) - .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string()) - .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string()) - .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string()) - .env("CARGO_PKG_VERSION_PRE", pkg.version().pre.as_str()) - .env("CARGO_PKG_VERSION", &pkg.version().to_string()) - .env("CARGO_PKG_NAME", &*pkg.name()) - .env( - "CARGO_PKG_DESCRIPTION", - metadata.description.as_ref().unwrap_or(&String::new()), - ) - .env( - "CARGO_PKG_HOMEPAGE", - metadata.homepage.as_ref().unwrap_or(&String::new()), - ) - .env( - "CARGO_PKG_REPOSITORY", - metadata.repository.as_ref().unwrap_or(&String::new()), - ) - .env( - "CARGO_PKG_LICENSE", - metadata.license.as_ref().unwrap_or(&String::new()), - ) - .env( - "CARGO_PKG_LICENSE_FILE", - metadata.license_file.as_ref().unwrap_or(&String::new()), - ) - .env("CARGO_PKG_AUTHORS", &pkg.authors().join(":")) - .cwd(pkg.root()); - - // Apply any environment variables from the config - for (key, value) in self.config.env_config()?.iter() { - // never override a value that has already been set by cargo - if cmd.get_envs().contains_key(key) { - continue; - } - - if value.is_force() || env::var_os(key).is_none() { - cmd.env(key, value.resolve(self.config)); - } - } - - Ok(cmd) - } -} - -/// Prepares a rustc_tool process with additional environment variables -/// that are only relevant in a context that has a unit -fn fill_rustc_tool_env(mut cmd: ProcessBuilder, unit: &Unit) -> ProcessBuilder { - if unit.target.is_bin() { - let name = unit - .target - .binary_filename() - .unwrap_or(unit.target.name().to_string()); - - cmd.env("CARGO_BIN_NAME", name); - } - cmd.env("CARGO_CRATE_NAME", unit.target.crate_name()); - cmd -} - -fn target_runner( - bcx: &BuildContext<'_, '_>, - kind: CompileKind, -) -> CargoResult)>> { - let target = bcx.target_data.short_name(&kind); - - // try target.{}.runner - let key = format!("target.{}.runner", target); - - if let Some(v) = bcx.config.get::>(&key)? 
{ - let path = v.path.resolve_program(bcx.config); - return Ok(Some((path, v.args))); - } - - // try target.'cfg(...)'.runner - let target_cfg = bcx.target_data.info(kind).cfg(); - let mut cfgs = bcx - .config - .target_cfgs()? - .iter() - .filter_map(|(key, cfg)| cfg.runner.as_ref().map(|runner| (key, runner))) - .filter(|(key, _runner)| CfgExpr::matches_key(key, target_cfg)); - let matching_runner = cfgs.next(); - if let Some((key, runner)) = cfgs.next() { - anyhow::bail!( - "several matching instances of `target.'cfg(..)'.runner` in `.cargo/config`\n\ - first match `{}` located in {}\n\ - second match `{}` located in {}", - matching_runner.unwrap().0, - matching_runner.unwrap().1.definition, - key, - runner.definition - ); - } - Ok(matching_runner.map(|(_k, runner)| { - ( - runner.val.path.clone().resolve_program(bcx.config), - runner.val.args.clone(), - ) - })) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/compile_kind.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/compile_kind.rs deleted file mode 100644 index adfa55fce..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/compile_kind.rs +++ /dev/null @@ -1,198 +0,0 @@ -use crate::core::Target; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::{Config, StableHasher}; -use anyhow::{bail, Context as _}; -use serde::Serialize; -use std::collections::BTreeSet; -use std::fs; -use std::hash::{Hash, Hasher}; -use std::path::Path; - -/// Indicator for how a unit is being compiled. -/// -/// This is used primarily for organizing cross compilations vs host -/// compilations, where cross compilations happen at the request of `--target` -/// and host compilations happen for things like build scripts and procedural -/// macros. -#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord)] -pub enum CompileKind { - /// Attached to a unit that is compiled for the "host" system or otherwise - /// is compiled without a `--target` flag. This is used for procedural - /// macros and build scripts, or if the `--target` flag isn't passed. - Host, - - /// Attached to a unit to be compiled for a particular target. This is used - /// for units when the `--target` flag is passed. - Target(CompileTarget), -} - -impl CompileKind { - pub fn is_host(&self) -> bool { - matches!(self, CompileKind::Host) - } - - pub fn for_target(self, target: &Target) -> CompileKind { - // Once we start compiling for the `Host` kind we continue doing so, but - // if we are a `Target` kind and then we start compiling for a target - // that needs to be on the host we lift ourselves up to `Host`. - match self { - CompileKind::Host => CompileKind::Host, - CompileKind::Target(_) if target.for_host() => CompileKind::Host, - CompileKind::Target(n) => CompileKind::Target(n), - } - } - - /// Creates a new list of `CompileKind` based on the requested list of - /// targets. - /// - /// If no targets are given then this returns a single-element vector with - /// `CompileKind::Host`. - pub fn from_requested_targets( - config: &Config, - targets: &[String], - ) -> CargoResult> { - if targets.len() > 1 && !config.cli_unstable().multitarget { - bail!("specifying multiple `--target` flags requires `-Zmultitarget`") - } - if !targets.is_empty() { - return Ok(targets - .iter() - .map(|value| Ok(CompileKind::Target(CompileTarget::new(value)?))) - // First collect into a set to deduplicate any `--target` passed - // more than once... - .collect::>>()? 
- // ... then generate a flat list for everything else to use. - .into_iter() - .collect()); - } - let kind = match &config.build_config()?.target { - Some(val) => { - let value = if val.raw_value().ends_with(".json") { - let path = val.clone().resolve_path(config); - path.to_str().expect("must be utf-8 in toml").to_string() - } else { - val.raw_value().to_string() - }; - CompileKind::Target(CompileTarget::new(&value)?) - } - None => CompileKind::Host, - }; - Ok(vec![kind]) - } - - /// Hash used for fingerprinting. - /// - /// Metadata hashing uses the normal Hash trait, which does not - /// differentiate on `.json` file contents. The fingerprint hash does - /// check the contents. - pub fn fingerprint_hash(&self) -> u64 { - match self { - CompileKind::Host => 0, - CompileKind::Target(target) => target.fingerprint_hash(), - } - } -} - -impl serde::ser::Serialize for CompileKind { - fn serialize(&self, s: S) -> Result - where - S: serde::ser::Serializer, - { - match self { - CompileKind::Host => None::<&str>.serialize(s), - CompileKind::Target(t) => Some(t.name).serialize(s), - } - } -} - -/// Abstraction for the representation of a compilation target that Cargo has. -/// -/// Compilation targets are one of two things right now: -/// -/// 1. A raw target string, like `x86_64-unknown-linux-gnu`. -/// 2. The path to a JSON file, such as `/path/to/my-target.json`. -/// -/// Raw target strings are typically dictated by `rustc` itself and represent -/// built-in targets. Custom JSON files are somewhat unstable, but supported -/// here in Cargo. Note that for JSON target files this `CompileTarget` stores a -/// full canonicalized path to the target. -/// -/// The main reason for this existence is to handle JSON target files where when -/// we call rustc we pass full paths but when we use it for Cargo's purposes -/// like naming directories or looking up configuration keys we only check the -/// file stem of JSON target files. For built-in rustc targets this is just an -/// uninterpreted string basically. -#[derive(PartialEq, Eq, Hash, Debug, Clone, Copy, PartialOrd, Ord, Serialize)] -pub struct CompileTarget { - name: InternedString, -} - -impl CompileTarget { - pub fn new(name: &str) -> CargoResult { - let name = name.trim(); - if name.is_empty() { - anyhow::bail!("target was empty"); - } - if !name.ends_with(".json") { - return Ok(CompileTarget { name: name.into() }); - } - - // If `name` ends in `.json` then it's likely a custom target - // specification. Canonicalize the path to ensure that different builds - // with different paths always produce the same result. - let path = Path::new(name) - .canonicalize() - .with_context(|| format!("target path {:?} is not a valid file", name))?; - - let name = path - .into_os_string() - .into_string() - .map_err(|_| anyhow::format_err!("target path is not valid unicode"))?; - Ok(CompileTarget { name: name.into() }) - } - - /// Returns the full unqualified name of this target, suitable for passing - /// to `rustc` directly. - /// - /// Typically this is pretty much the same as `short_name`, but for the case - /// of JSON target files this will be a full canonicalized path name for the - /// current filesystem. - pub fn rustc_target(&self) -> &str { - &self.name - } - - /// Returns a "short" version of the target name suitable for usage within - /// Cargo for configuration and such. - /// - /// This is typically the same as `rustc_target`, or the full name, but for - /// JSON target files this returns just the file stem (e.g. 
`foo` out of - /// `foo.json`) instead of the full path. - pub fn short_name(&self) -> &str { - // Flexible target specifications often point at json files, so if it - // looks like we've got one of those just use the file stem (the file - // name without ".json") as a short name for this target. Note that the - // `unwrap()` here should never trigger since we have a nonempty name - // and it starts as utf-8 so it's always utf-8 - if self.name.ends_with(".json") { - Path::new(&self.name).file_stem().unwrap().to_str().unwrap() - } else { - &self.name - } - } - - /// See [`CompileKind::fingerprint_hash`]. - pub fn fingerprint_hash(&self) -> u64 { - let mut hasher = StableHasher::new(); - self.name.hash(&mut hasher); - if self.name.ends_with(".json") { - // This may have some performance concerns, since it is called - // fairly often. If that ever seems worth fixing, consider - // embedding this in `CompileTarget`. - if let Ok(contents) = fs::read_to_string(self.name) { - contents.hash(&mut hasher); - } - } - hasher.finish() - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/context/compilation_files.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/context/compilation_files.rs deleted file mode 100644 index 37ab25202..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/context/compilation_files.rs +++ /dev/null @@ -1,674 +0,0 @@ -use std::collections::HashMap; -use std::env; -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::path::{Path, PathBuf}; -use std::sync::Arc; - -use lazycell::LazyCell; -use log::info; - -use super::{BuildContext, CompileKind, Context, FileFlavor, Layout}; -use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit}; -use crate::core::{Target, TargetKind, Workspace}; -use crate::util::{self, CargoResult, StableHasher}; - -/// This is a generic version number that can be changed to make -/// backwards-incompatible changes to any file structures in the output -/// directory. For example, the fingerprint files or the build-script -/// output files. Normally cargo updates ship with rustc updates which will -/// cause a new hash due to the rustc version changing, but this allows -/// cargo to be extra careful to deal with different versions of cargo that -/// use the same rustc version. -const METADATA_VERSION: u8 = 2; - -/// The `Metadata` is a hash used to make unique file names for each unit in a -/// build. It is also use for symbol mangling. -/// -/// For example: -/// - A project may depend on crate `A` and crate `B`, so the package name must be in the file name. -/// - Similarly a project may depend on two versions of `A`, so the version must be in the file name. -/// -/// In general this must include all things that need to be distinguished in different parts of -/// the same build. This is absolutely required or we override things before -/// we get chance to use them. -/// -/// It is also used for symbol mangling, because if you have two versions of -/// the same crate linked together, their symbols need to be differentiated. -/// -/// We use a hash because it is an easy way to guarantee -/// that all the inputs can be converted to a valid path. -/// -/// This also acts as the main layer of caching provided by Cargo. -/// For example, we want to cache `cargo build` and `cargo doc` separately, so that running one -/// does not invalidate the artifacts for the other. 
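// Illustrative sketch (not Cargo's actual implementation): as `short_name`
// above shows, built-in triples are used verbatim while a custom `*.json`
// target spec is keyed by its file stem. A tiny standalone version; the
// function name is invented for illustration.
use std::path::Path;

fn short_target_name(target: &str) -> &str {
    if target.ends_with(".json") {
        Path::new(target)
            .file_stem()
            .and_then(|stem| stem.to_str())
            .unwrap_or(target)
    } else {
        target
    }
}

fn main() {
    assert_eq!(short_target_name("x86_64-unknown-linux-gnu"), "x86_64-unknown-linux-gnu");
    assert_eq!(short_target_name("/path/to/my-target.json"), "my-target");
}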
We do this by including `CompileMode` in the -/// hash, thus the artifacts go in different folders and do not override each other. -/// If we don't add something that we should have, for this reason, we get the -/// correct output but rebuild more than is needed. -/// -/// Some things that need to be tracked to ensure the correct output should definitely *not* -/// go in the `Metadata`. For example, the modification time of a file, should be tracked to make a -/// rebuild when the file changes. However, it would be wasteful to include in the `Metadata`. The -/// old artifacts are never going to be needed again. We can save space by just overwriting them. -/// If we add something that we should not have, for this reason, we get the correct output but take -/// more space than needed. This makes not including something in `Metadata` -/// a form of cache invalidation. -/// -/// You should also avoid anything that would interfere with reproducible -/// builds. For example, *any* absolute path should be avoided. This is one -/// reason that `RUSTFLAGS` is not in `Metadata`, because it often has -/// absolute paths (like `--remap-path-prefix` which is fundamentally used for -/// reproducible builds and has absolute paths in it). Also, in some cases the -/// mangled symbols need to be stable between different builds with different -/// settings. For example, profile-guided optimizations need to swap -/// `RUSTFLAGS` between runs, but needs to keep the same symbol names. -/// -/// Note that the `Fingerprint` is in charge of tracking everything needed to determine if a -/// rebuild is needed. -#[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd)] -pub struct Metadata(u64); - -impl fmt::Display for Metadata { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:016x}", self.0) - } -} - -impl fmt::Debug for Metadata { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Metadata({:016x})", self.0) - } -} - -/// Information about the metadata hashes used for a `Unit`. -struct MetaInfo { - /// The symbol hash to use. - meta_hash: Metadata, - /// Whether or not the `-C extra-filename` flag is used to generate unique - /// output filenames for this `Unit`. - /// - /// If this is `true`, the `meta_hash` is used for the filename. - use_extra_filename: bool, -} - -/// Collection of information about the files emitted by the compiler, and the -/// output directory structure. -pub struct CompilationFiles<'a, 'cfg> { - /// The target directory layout for the host (and target if it is the same as host). - pub(super) host: Layout, - /// The target directory layout for the target (if different from then host). - pub(super) target: HashMap, - /// Additional directory to include a copy of the outputs. - export_dir: Option, - /// The root targets requested by the user on the command line (does not - /// include dependencies). - roots: Vec, - ws: &'a Workspace<'cfg>, - /// Metadata hash to use for each unit. - metas: HashMap, - /// For each Unit, a list all files produced. - outputs: HashMap>>>, -} - -/// Info about a single file emitted by the compiler. -#[derive(Debug)] -pub struct OutputFile { - /// Absolute path to the file that will be produced by the build process. - pub path: PathBuf, - /// If it should be linked into `target`, and what it should be called - /// (e.g., without metadata). - pub hardlink: Option, - /// If `--out-dir` is specified, the absolute path to the exported file. 
- pub export_path: Option, - /// Type of the file (library / debug symbol / else). - pub flavor: FileFlavor, -} - -impl OutputFile { - /// Gets the hard link if present; otherwise, returns the path. - pub fn bin_dst(&self) -> &PathBuf { - match self.hardlink { - Some(ref link_dst) => link_dst, - None => &self.path, - } - } -} - -impl<'a, 'cfg: 'a> CompilationFiles<'a, 'cfg> { - pub(super) fn new( - cx: &Context<'a, 'cfg>, - host: Layout, - target: HashMap, - ) -> CompilationFiles<'a, 'cfg> { - let mut metas = HashMap::new(); - for unit in &cx.bcx.roots { - metadata_of(unit, cx, &mut metas); - } - let outputs = metas - .keys() - .cloned() - .map(|unit| (unit, LazyCell::new())) - .collect(); - CompilationFiles { - ws: cx.bcx.ws, - host, - target, - export_dir: cx.bcx.build_config.export_dir.clone(), - roots: cx.bcx.roots.clone(), - metas, - outputs, - } - } - - /// Returns the appropriate directory layout for either a plugin or not. - pub fn layout(&self, kind: CompileKind) -> &Layout { - match kind { - CompileKind::Host => &self.host, - CompileKind::Target(target) => &self.target[&target], - } - } - - /// Gets the metadata for the given unit. - /// - /// See module docs for more details. - pub fn metadata(&self, unit: &Unit) -> Metadata { - self.metas[unit].meta_hash - } - - /// Returns whether or not `-C extra-filename` is used to extend the - /// output filenames to make them unique. - pub fn use_extra_filename(&self, unit: &Unit) -> bool { - self.metas[unit].use_extra_filename - } - - /// Gets the short hash based only on the `PackageId`. - /// Used for the metadata when `metadata` returns `None`. - pub fn target_short_hash(&self, unit: &Unit) -> String { - let hashable = unit.pkg.package_id().stable_hash(self.ws.root()); - util::short_hash(&(METADATA_VERSION, hashable)) - } - - /// Returns the directory where the artifacts for the given unit are - /// initially created. - pub fn out_dir(&self, unit: &Unit) -> PathBuf { - // Docscrape units need to have doc/ set as the out_dir so sources for reverse-dependencies - // will be put into doc/ and not into deps/ where the *.examples files are stored. - if unit.mode.is_doc() || unit.mode.is_doc_scrape() { - self.layout(unit.kind).doc().to_path_buf() - } else if unit.mode.is_doc_test() { - panic!("doc tests do not have an out dir"); - } else if unit.target.is_custom_build() { - self.build_script_dir(unit) - } else if unit.target.is_example() { - self.layout(unit.kind).examples().to_path_buf() - } else { - self.deps_dir(unit).to_path_buf() - } - } - - /// Additional export directory from `--out-dir`. - pub fn export_dir(&self) -> Option { - self.export_dir.clone() - } - - /// Directory name to use for a package in the form `NAME-HASH`. - /// - /// Note that some units may share the same directory, so care should be - /// taken in those cases! - fn pkg_dir(&self, unit: &Unit) -> String { - let name = unit.pkg.package_id().name(); - let meta = &self.metas[unit]; - if meta.use_extra_filename { - format!("{}-{}", name, meta.meta_hash) - } else { - format!("{}-{}", name, self.target_short_hash(unit)) - } - } - - /// Returns the final artifact path for the host (`/โ€ฆ/target/debug`) - pub fn host_dest(&self) -> &Path { - self.host.dest() - } - - /// Returns the root of the build output tree for the host (`/โ€ฆ/target`) - pub fn host_root(&self) -> &Path { - self.host.root() - } - - /// Returns the host `deps` directory path. 
- pub fn host_deps(&self) -> &Path { - self.host.deps() - } - - /// Returns the directories where Rust crate dependencies are found for the - /// specified unit. - pub fn deps_dir(&self, unit: &Unit) -> &Path { - self.layout(unit.kind).deps() - } - - /// Directory where the fingerprint for the given unit should go. - pub fn fingerprint_dir(&self, unit: &Unit) -> PathBuf { - let dir = self.pkg_dir(unit); - self.layout(unit.kind).fingerprint().join(dir) - } - - /// Returns the path for a file in the fingerprint directory. - /// - /// The "prefix" should be something to distinguish the file from other - /// files in the fingerprint directory. - pub fn fingerprint_file_path(&self, unit: &Unit, prefix: &str) -> PathBuf { - // Different targets need to be distinguished in the - let kind = unit.target.kind().description(); - let flavor = if unit.mode.is_any_test() { - "test-" - } else if unit.mode.is_doc() { - "doc-" - } else if unit.mode.is_run_custom_build() { - "run-" - } else { - "" - }; - let name = format!("{}{}{}-{}", prefix, flavor, kind, unit.target.name()); - self.fingerprint_dir(unit).join(name) - } - - /// Path where compiler output is cached. - pub fn message_cache_path(&self, unit: &Unit) -> PathBuf { - self.fingerprint_file_path(unit, "output-") - } - - /// Returns the directory where a compiled build script is stored. - /// `/path/to/target/{debug,release}/build/PKG-HASH` - pub fn build_script_dir(&self, unit: &Unit) -> PathBuf { - assert!(unit.target.is_custom_build()); - assert!(!unit.mode.is_run_custom_build()); - assert!(self.metas.contains_key(unit)); - let dir = self.pkg_dir(unit); - self.layout(CompileKind::Host).build().join(dir) - } - - /// Returns the directory where information about running a build script - /// is stored. - /// `/path/to/target/{debug,release}/build/PKG-HASH` - pub fn build_script_run_dir(&self, unit: &Unit) -> PathBuf { - assert!(unit.target.is_custom_build()); - assert!(unit.mode.is_run_custom_build()); - let dir = self.pkg_dir(unit); - self.layout(unit.kind).build().join(dir) - } - - /// Returns the "OUT_DIR" directory for running a build script. - /// `/path/to/target/{debug,release}/build/PKG-HASH/out` - pub fn build_script_out_dir(&self, unit: &Unit) -> PathBuf { - self.build_script_run_dir(unit).join("out") - } - - /// Returns the path to the executable binary for the given bin target. - /// - /// This should only to be used when a `Unit` is not available. - pub fn bin_link_for_target( - &self, - target: &Target, - kind: CompileKind, - bcx: &BuildContext<'_, '_>, - ) -> CargoResult { - assert!(target.is_bin()); - let dest = self.layout(kind).dest(); - let info = bcx.target_data.info(kind); - let (file_types, _) = info - .rustc_outputs( - CompileMode::Build, - &TargetKind::Bin, - bcx.target_data.short_name(&kind), - ) - .expect("target must support `bin`"); - - let file_type = file_types - .iter() - .find(|file_type| file_type.flavor == FileFlavor::Normal) - .expect("target must support `bin`"); - - Ok(dest.join(file_type.uplift_filename(target))) - } - - /// Returns the filenames that the given unit will generate. - /// - /// Note: It is not guaranteed that all of the files will be generated. - pub(super) fn outputs( - &self, - unit: &Unit, - bcx: &BuildContext<'a, 'cfg>, - ) -> CargoResult>> { - self.outputs[unit] - .try_borrow_with(|| self.calc_outputs(unit, bcx)) - .map(Arc::clone) - } - - /// Returns the path where the output for the given unit and FileType - /// should be uplifted to. 
- /// - /// Returns `None` if the unit shouldn't be uplifted (for example, a - /// dependent rlib). - fn uplift_to(&self, unit: &Unit, file_type: &FileType, from_path: &Path) -> Option { - // Tests, check, doc, etc. should not be uplifted. - if unit.mode != CompileMode::Build || file_type.flavor == FileFlavor::Rmeta { - return None; - } - // Only uplift: - // - Binaries: The user always wants to see these, even if they are - // implicitly built (for example for integration tests). - // - dylibs: This ensures that the dynamic linker pulls in all the - // latest copies (even if the dylib was built from a previous cargo - // build). There are complex reasons for this, see #8139, #6167, #6162. - // - Things directly requested from the command-line (the "roots"). - // This one is a little questionable for rlibs (see #6131), but is - // historically how Cargo has operated. This is primarily useful to - // give the user access to staticlibs and cdylibs. - if !unit.target.is_bin() - && !unit.target.is_custom_build() - && file_type.crate_type != Some(CrateType::Dylib) - && !self.roots.contains(unit) - { - return None; - } - - let filename = file_type.uplift_filename(&unit.target); - let uplift_path = if unit.target.is_example() { - // Examples live in their own little world. - self.layout(unit.kind).examples().join(filename) - } else if unit.target.is_custom_build() { - self.build_script_dir(unit).join(filename) - } else { - self.layout(unit.kind).dest().join(filename) - }; - if from_path == uplift_path { - // This can happen with things like examples that reside in the - // same directory, do not have a metadata hash (like on Windows), - // and do not have hyphens. - return None; - } - Some(uplift_path) - } - - fn calc_outputs( - &self, - unit: &Unit, - bcx: &BuildContext<'a, 'cfg>, - ) -> CargoResult>> { - let ret = match unit.mode { - CompileMode::Doc { .. } => { - let path = self - .out_dir(unit) - .join(unit.target.crate_name()) - .join("index.html"); - vec![OutputFile { - path, - hardlink: None, - export_path: None, - flavor: FileFlavor::Normal, - }] - } - CompileMode::RunCustomBuild => { - // At this time, this code path does not handle build script - // outputs. - vec![] - } - CompileMode::Doctest => { - // Doctests are built in a temporary directory and then - // deleted. There is the `--persist-doctests` unstable flag, - // but Cargo does not know about that. - vec![] - } - CompileMode::Docscrape => { - let path = self - .deps_dir(unit) - .join(format!("{}.examples", unit.buildkey())); - vec![OutputFile { - path, - hardlink: None, - export_path: None, - flavor: FileFlavor::Normal, - }] - } - CompileMode::Test - | CompileMode::Build - | CompileMode::Bench - | CompileMode::Check { .. } => self.calc_outputs_rustc(unit, bcx)?, - }; - info!("Target filenames: {:?}", ret); - - Ok(Arc::new(ret)) - } - - /// Computes the actual, full pathnames for all the files generated by rustc. - /// - /// The `OutputFile` also contains the paths where those files should be - /// "uplifted" to. 
- fn calc_outputs_rustc( - &self, - unit: &Unit, - bcx: &BuildContext<'a, 'cfg>, - ) -> CargoResult> { - let out_dir = self.out_dir(unit); - - let info = bcx.target_data.info(unit.kind); - let triple = bcx.target_data.short_name(&unit.kind); - let (file_types, unsupported) = - info.rustc_outputs(unit.mode, unit.target.kind(), triple)?; - if file_types.is_empty() { - if !unsupported.is_empty() { - let unsupported_strs: Vec<_> = unsupported.iter().map(|ct| ct.as_str()).collect(); - anyhow::bail!( - "cannot produce {} for `{}` as the target `{}` \ - does not support these crate types", - unsupported_strs.join(", "), - unit.pkg, - triple, - ) - } - anyhow::bail!( - "cannot compile `{}` as the target `{}` does not \ - support any of the output crate types", - unit.pkg, - triple, - ); - } - - // Convert FileType to OutputFile. - let mut outputs = Vec::new(); - for file_type in file_types { - let meta = &self.metas[unit]; - let meta_opt = meta.use_extra_filename.then(|| meta.meta_hash.to_string()); - let path = out_dir.join(file_type.output_filename(&unit.target, meta_opt.as_deref())); - - // If, the `different_binary_name` feature is enabled, the name of the hardlink will - // be the name of the binary provided by the user in `Cargo.toml`. - let hardlink = self.uplift_to(unit, &file_type, &path); - let export_path = if unit.target.is_custom_build() { - None - } else { - self.export_dir.as_ref().and_then(|export_dir| { - hardlink - .as_ref() - .map(|hardlink| export_dir.join(hardlink.file_name().unwrap())) - }) - }; - outputs.push(OutputFile { - path, - hardlink, - export_path, - flavor: file_type.flavor, - }); - } - Ok(outputs) - } -} - -fn metadata_of<'a>( - unit: &Unit, - cx: &Context<'_, '_>, - metas: &'a mut HashMap, -) -> &'a MetaInfo { - if !metas.contains_key(unit) { - let meta = compute_metadata(unit, cx, metas); - metas.insert(unit.clone(), meta); - for dep in cx.unit_deps(unit) { - metadata_of(&dep.unit, cx, metas); - } - } - &metas[unit] -} - -fn compute_metadata( - unit: &Unit, - cx: &Context<'_, '_>, - metas: &mut HashMap, -) -> MetaInfo { - let bcx = &cx.bcx; - let mut hasher = StableHasher::new(); - - METADATA_VERSION.hash(&mut hasher); - - // Unique metadata per (name, source, version) triple. This'll allow us - // to pull crates from anywhere without worrying about conflicts. - unit.pkg - .package_id() - .stable_hash(bcx.ws.root()) - .hash(&mut hasher); - - // Also mix in enabled features to our metadata. This'll ensure that - // when changing feature sets each lib is separately cached. - unit.features.hash(&mut hasher); - - // Mix in the target-metadata of all the dependencies of this target. - let mut deps_metadata = cx - .unit_deps(unit) - .iter() - .map(|dep| metadata_of(&dep.unit, cx, metas).meta_hash) - .collect::>(); - deps_metadata.sort(); - deps_metadata.hash(&mut hasher); - - // Throw in the profile we're compiling with. This helps caching - // `panic=abort` and `panic=unwind` artifacts, additionally with various - // settings like debuginfo and whatnot. - unit.profile.hash(&mut hasher); - unit.mode.hash(&mut hasher); - cx.lto[unit].hash(&mut hasher); - - // Artifacts compiled for the host should have a different metadata - // piece than those compiled for the target, so make sure we throw in - // the unit's `kind` as well - unit.kind.hash(&mut hasher); - - // Finally throw in the target name/kind. This ensures that concurrent - // compiles of targets in the same crate don't collide. 
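// Illustrative sketch (not Cargo's actual implementation): `compute_metadata`
// above mixes the package id, enabled features, dependency hashes, profile,
// mode, and compile kind into one hash that becomes the `-C metadata` /
// `-C extra-filename` value. A much-reduced version of the idea; Cargo uses
// its own StableHasher, whereas std's DefaultHasher is used here purely for
// illustration and is not guaranteed stable across Rust releases.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn unit_metadata_hash(
    package_id: &str,  // name + version + source
    features: &[&str], // enabled cargo features
    profile: &str,     // debug/release settings
    mode: &str,        // build vs doc vs check, etc.
    for_host: bool,    // host artifacts vs --target artifacts
    target_name: &str,
) -> u64 {
    let mut h = DefaultHasher::new();
    package_id.hash(&mut h);
    features.hash(&mut h);
    profile.hash(&mut h);
    mode.hash(&mut h);
    for_host.hash(&mut h);
    target_name.hash(&mut h);
    h.finish()
}

fn main() {
    let with_derive = unit_metadata_hash("serde 1.0.136", &["derive"], "dev", "build", false, "serde");
    let without = unit_metadata_hash("serde 1.0.136", &[], "dev", "build", false, "serde");
    // Different feature sets land in differently named artifacts, so they
    // never overwrite each other in target/.
    assert_ne!(with_derive, without);
}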
- unit.target.name().hash(&mut hasher); - unit.target.kind().hash(&mut hasher); - - hash_rustc_version(bcx, &mut hasher); - - if cx.bcx.ws.is_member(&unit.pkg) { - // This is primarily here for clippy. This ensures that the clippy - // artifacts are separate from the `check` ones. - if let Some(path) = &cx.bcx.rustc().workspace_wrapper { - path.hash(&mut hasher); - } - } - - // Seed the contents of `__CARGO_DEFAULT_LIB_METADATA` to the hasher if present. - // This should be the release channel, to get a different hash for each channel. - if let Ok(ref channel) = env::var("__CARGO_DEFAULT_LIB_METADATA") { - channel.hash(&mut hasher); - } - - // std units need to be kept separate from user dependencies. std crates - // are differentiated in the Unit with `is_std` (for things like - // `-Zforce-unstable-if-unmarked`), so they are always built separately. - // This isn't strictly necessary for build dependencies which probably - // don't need unstable support. A future experiment might be to set - // `is_std` to false for build dependencies so that they can be shared - // with user dependencies. - unit.is_std.hash(&mut hasher); - - MetaInfo { - meta_hash: Metadata(hasher.finish()), - use_extra_filename: should_use_metadata(bcx, unit), - } -} - -fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { - let vers = &bcx.rustc().version; - if vers.pre.is_empty() || bcx.config.cli_unstable().separate_nightlies { - // For stable, keep the artifacts separate. This helps if someone is - // testing multiple versions, to avoid recompiles. - bcx.rustc().verbose_version.hash(hasher); - return; - } - // On "nightly"/"beta"/"dev"/etc, keep each "channel" separate. Don't hash - // the date/git information, so that whenever someone updates "nightly", - // they won't have a bunch of stale artifacts in the target directory. - // - // This assumes that the first segment is the important bit ("nightly", - // "beta", "dev", etc.). Skip other parts like the `.3` in `-beta.3`. - vers.pre.split('.').next().hash(hasher); - // Keep "host" since some people switch hosts to implicitly change - // targets, (like gnu vs musl or gnu vs msvc). In the future, we may want - // to consider hashing `unit.kind.short_name()` instead. - bcx.rustc().host.hash(hasher); - // None of the other lines are important. Currently they are: - // binary: rustc <-- or "rustdoc" - // commit-hash: 38114ff16e7856f98b2b4be7ab4cd29b38bed59a - // commit-date: 2020-03-21 - // host: x86_64-apple-darwin - // release: 1.44.0-nightly - // LLVM version: 9.0 - // - // The backend version ("LLVM version") might become more relevant in - // the future when cranelift sees more use, and people want to switch - // between different backends without recompiling. -} - -/// Returns whether or not this unit should use a metadata hash. -fn should_use_metadata(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool { - if unit.mode.is_doc_test() || unit.mode.is_doc() { - // Doc tests do not have metadata. - return false; - } - if unit.mode.is_any_test() || unit.mode.is_check() { - // These always use metadata. - return true; - } - // No metadata in these cases: - // - // - dylibs: - // - if any dylib names are encoded in executables, so they can't be renamed. - // - TODO: Maybe use `-install-name` on macOS or `-soname` on other UNIX systems - // to specify the dylib name to be used by the linker instead of the filename. 
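// Illustrative sketch (not Cargo's actual implementation): `hash_rustc_version`
// above hashes only the channel part of a pre-release rustc version
// ("nightly", "beta", "dev", ...) so a routine nightly date bump does not
// invalidate every cached artifact. A small version of that extraction,
// assuming the `semver` crate (1.x) as a dependency; the helper name is
// invented.
use semver::Version;

fn channel(version: &str) -> Option<String> {
    let v = Version::parse(version).ok()?;
    if v.pre.is_empty() {
        // Stable releases have no pre-release component.
        return None;
    }
    // Keep only the first dotted segment: "beta.3" -> "beta".
    v.pre.split('.').next().map(str::to_string)
}

fn main() {
    assert_eq!(channel("1.44.0-nightly").as_deref(), Some("nightly"));
    assert_eq!(channel("1.60.0-beta.3").as_deref(), Some("beta"));
    assert_eq!(channel("1.58.1"), None);
}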
- // - Windows MSVC executables: The path to the PDB is embedded in the - // executable, and we don't want the PDB path to include the hash in it. - // - wasm32-unknown-emscripten executables: When using emscripten, the path to the - // .wasm file is embedded in the .js file, so we don't want the hash in there. - // - // This is only done for local packages, as we don't expect to export - // dependencies. - // - // The __CARGO_DEFAULT_LIB_METADATA env var is used to override this to - // force metadata in the hash. This is only used for building libstd. For - // example, if libstd is placed in a common location, we don't want a file - // named /usr/lib/libstd.so which could conflict with other rustc - // installs. In addition it prevents accidentally loading a libstd of a - // different compiler at runtime. - // See https://github.com/rust-lang/cargo/issues/3005 - let short_name = bcx.target_data.short_name(&unit.kind); - if (unit.target.is_dylib() - || unit.target.is_cdylib() - || (unit.target.is_executable() && short_name == "wasm32-unknown-emscripten") - || (unit.target.is_executable() && short_name.contains("msvc"))) - && unit.pkg.package_id().source_id().is_path() - && env::var("__CARGO_DEFAULT_LIB_METADATA").is_err() - { - return false; - } - true -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/context/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/context/mod.rs deleted file mode 100644 index 6a81eb4ae..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/context/mod.rs +++ /dev/null @@ -1,662 +0,0 @@ -use std::collections::{BTreeSet, HashMap, HashSet}; -use std::path::{Path, PathBuf}; -use std::sync::{Arc, Mutex}; - -use crate::core::compiler::compilation::{self, UnitOutput}; -use crate::core::compiler::{self, Unit}; -use crate::core::PackageId; -use crate::util::errors::CargoResult; -use crate::util::profile; -use anyhow::{bail, Context as _}; -use filetime::FileTime; -use jobserver::Client; - -use super::build_plan::BuildPlan; -use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts}; -use super::fingerprint::Fingerprint; -use super::job_queue::JobQueue; -use super::layout::Layout; -use super::lto::Lto; -use super::unit_graph::UnitDep; -use super::{ - BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor, RustDocFingerprint, -}; - -mod compilation_files; -use self::compilation_files::CompilationFiles; -pub use self::compilation_files::{Metadata, OutputFile}; - -/// Collection of all the stuff that is needed to perform a build. -pub struct Context<'a, 'cfg> { - /// Mostly static information about the build task. - pub bcx: &'a BuildContext<'a, 'cfg>, - /// A large collection of information about the result of the entire compilation. - pub compilation: Compilation<'cfg>, - /// Output from build scripts, updated after each build script runs. - pub build_script_outputs: Arc>, - /// Dependencies (like rerun-if-changed) declared by a build script. - /// This is *only* populated from the output from previous runs. - /// If the build script hasn't ever been run, then it must be run. - pub build_explicit_deps: HashMap, - /// Fingerprints used to detect if a unit is out-of-date. - pub fingerprints: HashMap>, - /// Cache of file mtimes to reduce filesystem hits. - pub mtime_cache: HashMap, - /// A set used to track which units have been compiled. 
- /// A unit may appear in the job graph multiple times as a dependency of - /// multiple packages, but it only needs to run once. - pub compiled: HashSet, - /// Linking information for each `Unit`. - /// See `build_map` for details. - pub build_scripts: HashMap>, - /// Job server client to manage concurrency with other processes. - pub jobserver: Client, - /// "Primary" packages are the ones the user selected on the command-line - /// with `-p` flags. If no flags are specified, then it is the defaults - /// based on the current directory and the default workspace members. - primary_packages: HashSet, - /// An abstraction of the files and directories that will be generated by - /// the compilation. This is `None` until after `unit_dependencies` has - /// been computed. - files: Option>, - - /// A flag indicating whether pipelining is enabled for this compilation - /// session. Pipelining largely only affects the edges of the dependency - /// graph that we generate at the end, and otherwise it's pretty - /// straightforward. - pipelining: bool, - - /// A set of units which are compiling rlibs and are expected to produce - /// metadata files in addition to the rlib itself. This is only filled in - /// when `pipelining` above is enabled. - rmeta_required: HashSet, - - /// When we're in jobserver-per-rustc process mode, this keeps those - /// jobserver clients for each Unit (which eventually becomes a rustc - /// process). - pub rustc_clients: HashMap, - - /// Map of the LTO-status of each unit. This indicates what sort of - /// compilation is happening (only object, only bitcode, both, etc), and is - /// precalculated early on. - pub lto: HashMap, - - /// Map of Doc/Docscrape units to metadata for their -Cmetadata flag. - /// See Context::find_metadata_units for more details. - pub metadata_for_doc_units: HashMap, -} - -impl<'a, 'cfg> Context<'a, 'cfg> { - pub fn new(bcx: &'a BuildContext<'a, 'cfg>) -> CargoResult { - // Load up the jobserver that we'll use to manage our parallelism. This - // is the same as the GNU make implementation of a jobserver, and - // intentionally so! It's hoped that we can interact with GNU make and - // all share the same jobserver. - // - // Note that if we don't have a jobserver in our environment then we - // create our own, and we create it with `n` tokens, but immediately - // acquire one, because one token is ourself, a running process. - let jobserver = match bcx.config.jobserver_from_env() { - Some(c) => c.clone(), - None => { - let client = Client::new(bcx.build_config.jobs as usize) - .with_context(|| "failed to create jobserver")?; - client.acquire_raw()?; - client - } - }; - - let pipelining = bcx.config.build_config()?.pipelining.unwrap_or(true); - - Ok(Self { - bcx, - compilation: Compilation::new(bcx)?, - build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())), - fingerprints: HashMap::new(), - mtime_cache: HashMap::new(), - compiled: HashSet::new(), - build_scripts: HashMap::new(), - build_explicit_deps: HashMap::new(), - jobserver, - primary_packages: HashSet::new(), - files: None, - rmeta_required: HashSet::new(), - rustc_clients: HashMap::new(), - pipelining, - lto: HashMap::new(), - metadata_for_doc_units: HashMap::new(), - }) - } - - /// Starts compilation, waits for it to finish, and returns information - /// about the result of compilation. 
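// Illustrative sketch (not Cargo's actual implementation): `Context::new`
// above creates a GNU-make-compatible jobserver with `n` tokens and
// immediately acquires one, because the running process itself counts as a
// job. A minimal version with the `jobserver` crate; the token count of 4 is
// arbitrary.
use jobserver::Client;

fn main() -> std::io::Result<()> {
    // Four tokens total; take one for ourselves right away, leaving three
    // for child compiler processes.
    let client = Client::new(4)?;
    client.acquire_raw()?;

    // A child job would later acquire and release one of the remaining tokens.
    let token = client.acquire()?;
    drop(token); // dropping the token returns it to the pool
    Ok(())
}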
- pub fn compile(mut self, exec: &Arc) -> CargoResult> { - let mut queue = JobQueue::new(self.bcx); - let mut plan = BuildPlan::new(); - let build_plan = self.bcx.build_config.build_plan; - self.lto = super::lto::generate(self.bcx)?; - self.prepare_units()?; - self.prepare()?; - custom_build::build_map(&mut self)?; - self.check_collisions()?; - self.compute_metadata_for_doc_units(); - - // We need to make sure that if there were any previous docs - // already compiled, they were compiled with the same Rustc version that we're currently - // using. Otherways we must remove the `doc/` folder and compile again forcing a rebuild. - // - // This is important because the `.js`/`.html` & `.css` files that are generated by Rustc don't have - // any versioning (See https://github.com/rust-lang/cargo/issues/8461). - // Therefore, we can end up with weird bugs and behaviours if we mix different - // versions of these files. - if self.bcx.build_config.mode.is_doc() { - RustDocFingerprint::check_rustdoc_fingerprint(&self)? - } - - for unit in &self.bcx.roots { - // Build up a list of pending jobs, each of which represent - // compiling a particular package. No actual work is executed as - // part of this, that's all done next as part of the `execute` - // function which will run everything in order with proper - // parallelism. - let force_rebuild = self.bcx.build_config.force_rebuild; - super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?; - } - - // Now that we've got the full job queue and we've done all our - // fingerprint analysis to determine what to run, bust all the memoized - // fingerprint hashes to ensure that during the build they all get the - // most up-to-date values. In theory we only need to bust hashes that - // transitively depend on a dirty build script, but it shouldn't matter - // that much for performance anyway. - for fingerprint in self.fingerprints.values() { - fingerprint.clear_memoized(); - } - - // Now that we've figured out everything that we're going to do, do it! - queue.execute(&mut self, &mut plan)?; - - if build_plan { - plan.set_inputs(self.build_plan_inputs()?); - plan.output_plan(self.bcx.config); - } - - // Collect the result of the build into `self.compilation`. - for unit in &self.bcx.roots { - // Collect tests and executables. - for output in self.outputs(unit)?.iter() { - if output.flavor == FileFlavor::DebugInfo || output.flavor == FileFlavor::Auxiliary - { - continue; - } - - let bindst = output.bin_dst(); - - if unit.mode == CompileMode::Test { - self.compilation - .tests - .push(self.unit_output(unit, &output.path)); - } else if unit.target.is_executable() { - self.compilation - .binaries - .push(self.unit_output(unit, bindst)); - } else if unit.target.is_cdylib() - && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit) - { - self.compilation - .cdylibs - .push(self.unit_output(unit, bindst)); - } - } - - // If the unit has a build script, add `OUT_DIR` to the - // environment variables. - if unit.target.is_lib() { - for dep in &self.bcx.unit_graph[unit] { - if dep.unit.mode.is_run_custom_build() { - let out_dir = self - .files() - .build_script_out_dir(&dep.unit) - .display() - .to_string(); - let script_meta = self.get_run_build_script_metadata(&dep.unit); - self.compilation - .extra_env - .entry(script_meta) - .or_insert_with(Vec::new) - .push(("OUT_DIR".to_string(), out_dir)); - } - } - } - - // Collect information for `rustdoc --test`. 
- if unit.mode.is_doc_test() { - let mut unstable_opts = false; - let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?; - args.extend(compiler::lto_args(&self, unit)); - - for feature in &unit.features { - args.push("--cfg".into()); - args.push(format!("feature=\"{}\"", feature).into()); - } - let script_meta = self.find_build_script_metadata(unit); - if let Some(meta) = script_meta { - if let Some(output) = self.build_script_outputs.lock().unwrap().get(meta) { - for cfg in &output.cfgs { - args.push("--cfg".into()); - args.push(cfg.into()); - } - - for (lt, arg) in &output.linker_args { - if lt.applies_to(&unit.target) { - args.push("-C".into()); - args.push(format!("link-arg={}", arg).into()); - } - } - } - } - args.extend(self.bcx.rustdocflags_args(unit).iter().map(Into::into)); - - use super::MessageFormat; - let format = match self.bcx.build_config.message_format { - MessageFormat::Short => "short", - MessageFormat::Human => "human", - MessageFormat::Json { .. } => "json", - }; - args.push("--error-format".into()); - args.push(format.into()); - - self.compilation.to_doc_test.push(compilation::Doctest { - unit: unit.clone(), - args, - unstable_opts, - linker: self.bcx.linker(unit.kind), - script_meta, - }); - } - - super::output_depinfo(&mut self, unit)?; - } - - for (script_meta, output) in self.build_script_outputs.lock().unwrap().iter() { - self.compilation - .extra_env - .entry(*script_meta) - .or_insert_with(Vec::new) - .extend(output.env.iter().cloned()); - - for dir in output.library_paths.iter() { - self.compilation.native_dirs.insert(dir.clone()); - } - } - Ok(self.compilation) - } - - /// Returns the executable for the specified unit (if any). - pub fn get_executable(&mut self, unit: &Unit) -> CargoResult> { - let is_binary = unit.target.is_executable(); - let is_test = unit.mode.is_any_test(); - if !unit.mode.generates_executable() || !(is_binary || is_test) { - return Ok(None); - } - Ok(self - .outputs(unit)? - .iter() - .find(|o| o.flavor == FileFlavor::Normal) - .map(|output| output.bin_dst().clone())) - } - - pub fn prepare_units(&mut self) -> CargoResult<()> { - let dest = self.bcx.profiles.get_dir_name(); - let host_layout = Layout::new(self.bcx.ws, None, &dest)?; - let mut targets = HashMap::new(); - for kind in self.bcx.all_kinds.iter() { - if let CompileKind::Target(target) = *kind { - let layout = Layout::new(self.bcx.ws, Some(target), &dest)?; - targets.insert(target, layout); - } - } - self.primary_packages - .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id())); - self.compilation - .root_crate_names - .extend(self.bcx.roots.iter().map(|u| u.target.crate_name())); - - self.record_units_requiring_metadata(); - - let files = CompilationFiles::new(self, host_layout, targets); - self.files = Some(files); - Ok(()) - } - - /// Prepare this context, ensuring that all filesystem directories are in - /// place. 
- pub fn prepare(&mut self) -> CargoResult<()> { - let _p = profile::start("preparing layout"); - - self.files_mut() - .host - .prepare() - .with_context(|| "couldn't prepare build directories")?; - for target in self.files.as_mut().unwrap().target.values_mut() { - target - .prepare() - .with_context(|| "couldn't prepare build directories")?; - } - - let files = self.files.as_ref().unwrap(); - for &kind in self.bcx.all_kinds.iter() { - let layout = files.layout(kind); - self.compilation - .root_output - .insert(kind, layout.dest().to_path_buf()); - self.compilation - .deps_output - .insert(kind, layout.deps().to_path_buf()); - } - Ok(()) - } - - pub fn files(&self) -> &CompilationFiles<'a, 'cfg> { - self.files.as_ref().unwrap() - } - - fn files_mut(&mut self) -> &mut CompilationFiles<'a, 'cfg> { - self.files.as_mut().unwrap() - } - - /// Returns the filenames that the given unit will generate. - pub fn outputs(&self, unit: &Unit) -> CargoResult>> { - self.files.as_ref().unwrap().outputs(unit, self.bcx) - } - - /// Direct dependencies for the given unit. - pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] { - &self.bcx.unit_graph[unit] - } - - /// Returns the RunCustomBuild Unit associated with the given Unit. - /// - /// If the package does not have a build script, this returns None. - pub fn find_build_script_unit(&self, unit: &Unit) -> Option { - if unit.mode.is_run_custom_build() { - return Some(unit.clone()); - } - self.bcx.unit_graph[unit] - .iter() - .find(|unit_dep| { - unit_dep.unit.mode.is_run_custom_build() - && unit_dep.unit.pkg.package_id() == unit.pkg.package_id() - }) - .map(|unit_dep| unit_dep.unit.clone()) - } - - /// Returns the metadata hash for the RunCustomBuild Unit associated with - /// the given unit. - /// - /// If the package does not have a build script, this returns None. - pub fn find_build_script_metadata(&self, unit: &Unit) -> Option { - let script_unit = self.find_build_script_unit(unit)?; - Some(self.get_run_build_script_metadata(&script_unit)) - } - - /// Returns the metadata hash for a RunCustomBuild unit. - pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata { - assert!(unit.mode.is_run_custom_build()); - self.files().metadata(unit) - } - - pub fn is_primary_package(&self, unit: &Unit) -> bool { - self.primary_packages.contains(&unit.pkg.package_id()) - } - - /// Returns the list of filenames read by cargo to generate the `BuildContext` - /// (all `Cargo.toml`, etc.). - pub fn build_plan_inputs(&self) -> CargoResult> { - // Keep sorted for consistency. - let mut inputs = BTreeSet::new(); - // Note: dev-deps are skipped if they are not present in the unit graph. - for unit in self.bcx.unit_graph.keys() { - inputs.insert(unit.pkg.manifest_path().to_path_buf()); - } - Ok(inputs.into_iter().collect()) - } - - /// Returns a [`UnitOutput`] which represents some information about the - /// output of a unit. 
- pub fn unit_output(&self, unit: &Unit, path: &Path) -> UnitOutput { - let script_meta = self.find_build_script_metadata(unit); - UnitOutput { - unit: unit.clone(), - path: path.to_path_buf(), - script_meta, - } - } - - fn check_collisions(&self) -> CargoResult<()> { - let mut output_collisions = HashMap::new(); - let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String { - format!( - "The {} target `{}` in package `{}` has the same output \ - filename as the {} target `{}` in package `{}`.\n\ - Colliding filename is: {}\n", - unit.target.kind().description(), - unit.target.name(), - unit.pkg.package_id(), - other_unit.target.kind().description(), - other_unit.target.name(), - other_unit.pkg.package_id(), - path.display() - ) - }; - let suggestion = - "Consider changing their names to be unique or compiling them separately.\n\ - This may become a hard error in the future; see \ - ."; - let rustdoc_suggestion = - "This is a known bug where multiple crates with the same name use\n\ - the same path; see ."; - let report_collision = |unit: &Unit, - other_unit: &Unit, - path: &PathBuf, - suggestion: &str| - -> CargoResult<()> { - if unit.target.name() == other_unit.target.name() { - self.bcx.config.shell().warn(format!( - "output filename collision.\n\ - {}\ - The targets should have unique names.\n\ - {}", - describe_collision(unit, other_unit, path), - suggestion - )) - } else { - self.bcx.config.shell().warn(format!( - "output filename collision.\n\ - {}\ - The output filenames should be unique.\n\ - {}\n\ - If this looks unexpected, it may be a bug in Cargo. Please file a bug report at\n\ - https://github.com/rust-lang/cargo/issues/ with as much information as you\n\ - can provide.\n\ - cargo {} running on `{}` target `{}`\n\ - First unit: {:?}\n\ - Second unit: {:?}", - describe_collision(unit, other_unit, path), - suggestion, - crate::version(), - self.bcx.host_triple(), - self.bcx.target_data.short_name(&unit.kind), - unit, - other_unit)) - } - }; - - fn doc_collision_error(unit: &Unit, other_unit: &Unit) -> CargoResult<()> { - bail!( - "document output filename collision\n\ - The {} `{}` in package `{}` has the same name as the {} `{}` in package `{}`.\n\ - Only one may be documented at once since they output to the same path.\n\ - Consider documenting only one, renaming one, \ - or marking one with `doc = false` in Cargo.toml.", - unit.target.kind().description(), - unit.target.name(), - unit.pkg, - other_unit.target.kind().description(), - other_unit.target.name(), - other_unit.pkg, - ); - } - - let mut keys = self - .bcx - .unit_graph - .keys() - .filter(|unit| !unit.mode.is_run_custom_build()) - .collect::>(); - // Sort for consistent error messages. - keys.sort_unstable(); - // These are kept separate to retain compatibility with older - // versions, which generated an error when there was a duplicate lib - // or bin (but the old code did not check bin<->lib collisions). To - // retain backwards compatibility, this only generates an error for - // duplicate libs or duplicate bins (but not both). Ideally this - // shouldn't be here, but since there isn't a complete workaround, - // yet, this retains the old behavior. - let mut doc_libs = HashMap::new(); - let mut doc_bins = HashMap::new(); - for unit in keys { - if unit.mode.is_doc() && self.is_primary_package(unit) { - // These situations have been an error since before 1.0, so it - // is not a warning like the other situations. 
- if unit.target.is_lib() { - if let Some(prev) = doc_libs.insert((unit.target.crate_name(), unit.kind), unit) - { - doc_collision_error(unit, prev)?; - } - } else if let Some(prev) = - doc_bins.insert((unit.target.crate_name(), unit.kind), unit) - { - doc_collision_error(unit, prev)?; - } - } - for output in self.outputs(unit)?.iter() { - if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) { - if unit.mode.is_doc() { - // See https://github.com/rust-lang/rust/issues/56169 - // and https://github.com/rust-lang/rust/issues/61378 - report_collision(unit, other_unit, &output.path, rustdoc_suggestion)?; - } else { - report_collision(unit, other_unit, &output.path, suggestion)?; - } - } - if let Some(hardlink) = output.hardlink.as_ref() { - if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) { - report_collision(unit, other_unit, hardlink, suggestion)?; - } - } - if let Some(ref export_path) = output.export_path { - if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) { - self.bcx.config.shell().warn(format!( - "`--out-dir` filename collision.\n\ - {}\ - The exported filenames should be unique.\n\ - {}", - describe_collision(unit, other_unit, export_path), - suggestion - ))?; - } - } - } - } - Ok(()) - } - - /// Records the list of units which are required to emit metadata. - /// - /// Units which depend only on the metadata of others requires the others to - /// actually produce metadata, so we'll record that here. - fn record_units_requiring_metadata(&mut self) { - for (key, deps) in self.bcx.unit_graph.iter() { - for dep in deps { - if self.only_requires_rmeta(key, &dep.unit) { - self.rmeta_required.insert(dep.unit.clone()); - } - } - } - } - - /// Returns whether when `parent` depends on `dep` if it only requires the - /// metadata file from `dep`. - pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool { - // this is only enabled when pipelining is enabled - self.pipelining - // We're only a candidate for requiring an `rmeta` file if we - // ourselves are building an rlib, - && !parent.requires_upstream_objects() - && parent.mode == CompileMode::Build - // Our dependency must also be built as an rlib, otherwise the - // object code must be useful in some fashion - && !dep.requires_upstream_objects() - && dep.mode == CompileMode::Build - } - - /// Returns whether when `unit` is built whether it should emit metadata as - /// well because some compilations rely on that. - pub fn rmeta_required(&self, unit: &Unit) -> bool { - self.rmeta_required.contains(unit) - } - - pub fn new_jobserver(&mut self) -> CargoResult { - let tokens = self.bcx.build_config.jobs as usize; - let client = Client::new(tokens).with_context(|| "failed to create jobserver")?; - - // Drain the client fully - for i in 0..tokens { - client.acquire_raw().with_context(|| { - format!( - "failed to fully drain {}/{} token from jobserver at startup", - i, tokens, - ) - })?; - } - - Ok(client) - } - - /// Finds metadata for Doc/Docscrape units. - /// - /// rustdoc needs a -Cmetadata flag in order to recognize StableCrateIds that refer to - /// items in the crate being documented. The -Cmetadata flag used by reverse-dependencies - /// will be the metadata of the Cargo unit that generated the current library's rmeta file, - /// which should be a Check unit. - /// - /// If the current crate has reverse-dependencies, such a Check unit should exist, and so - /// we use that crate's metadata. 
If not, we use the crate's Doc unit so at least examples - /// scraped from the current crate can be used when documenting the current crate. - pub fn compute_metadata_for_doc_units(&mut self) { - for unit in self.bcx.unit_graph.keys() { - if !unit.mode.is_doc() && !unit.mode.is_doc_scrape() { - continue; - } - - let matching_units = self - .bcx - .unit_graph - .keys() - .filter(|other| { - unit.pkg == other.pkg - && unit.target == other.target - && !other.mode.is_doc_scrape() - }) - .collect::>(); - let metadata_unit = matching_units - .iter() - .find(|other| other.mode.is_check()) - .or_else(|| matching_units.iter().find(|other| other.mode.is_doc())) - .unwrap_or(&unit); - self.metadata_for_doc_units - .insert(unit.clone(), self.files().metadata(metadata_unit)); - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/crate_type.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/crate_type.rs deleted file mode 100644 index 763f99a65..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/crate_type.rs +++ /dev/null @@ -1,105 +0,0 @@ -use std::fmt; - -#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub enum CrateType { - Bin, - Lib, - Rlib, - Dylib, - Cdylib, - Staticlib, - ProcMacro, - Other(String), -} - -impl CrateType { - pub fn as_str(&self) -> &str { - match self { - CrateType::Bin => "bin", - CrateType::Lib => "lib", - CrateType::Rlib => "rlib", - CrateType::Dylib => "dylib", - CrateType::Cdylib => "cdylib", - CrateType::Staticlib => "staticlib", - CrateType::ProcMacro => "proc-macro", - CrateType::Other(s) => s, - } - } - - pub fn can_lto(&self) -> bool { - match self { - CrateType::Bin | CrateType::Staticlib | CrateType::Cdylib => true, - CrateType::Lib - | CrateType::Rlib - | CrateType::Dylib - | CrateType::ProcMacro - | CrateType::Other(..) => false, - } - } - - pub fn is_linkable(&self) -> bool { - match self { - CrateType::Lib | CrateType::Rlib | CrateType::Dylib | CrateType::ProcMacro => true, - CrateType::Bin | CrateType::Cdylib | CrateType::Staticlib | CrateType::Other(..) => { - false - } - } - } - - pub fn is_dynamic(&self) -> bool { - match self { - CrateType::Dylib | CrateType::Cdylib | CrateType::ProcMacro => true, - CrateType::Lib - | CrateType::Rlib - | CrateType::Bin - | CrateType::Staticlib - | CrateType::Other(..) => false, - } - } - - pub fn requires_upstream_objects(&self) -> bool { - // "lib" == "rlib" and is a compilation that doesn't actually - // require upstream object files to exist, only upstream metadata - // files. As a result, it doesn't require upstream artifacts - - !matches!(self, CrateType::Lib | CrateType::Rlib) - // Everything else, however, is some form of "linkable output" or - // something that requires upstream object files. 
- } -} - -impl fmt::Display for CrateType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.as_str().fmt(f) - } -} - -impl<'a> From<&'a String> for CrateType { - fn from(s: &'a String) -> Self { - match s.as_str() { - "bin" => CrateType::Bin, - "lib" => CrateType::Lib, - "rlib" => CrateType::Rlib, - "dylib" => CrateType::Dylib, - "cdylib" => CrateType::Cdylib, - "staticlib" => CrateType::Staticlib, - "procmacro" => CrateType::ProcMacro, - _ => CrateType::Other(s.clone()), - } - } -} - -impl fmt::Debug for CrateType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.to_string().fmt(f) - } -} - -impl serde::Serialize for CrateType { - fn serialize(&self, s: S) -> Result - where - S: serde::ser::Serializer, - { - self.to_string().serialize(s) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/custom_build.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/custom_build.rs deleted file mode 100644 index ae627c926..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/custom_build.rs +++ /dev/null @@ -1,989 +0,0 @@ -use super::job::{Freshness, Job, Work}; -use super::{fingerprint, Context, LinkType, Unit}; -use crate::core::compiler::context::Metadata; -use crate::core::compiler::job_queue::JobState; -use crate::core::{profiles::ProfileRoot, PackageId, Target}; -use crate::util::errors::CargoResult; -use crate::util::machine_message::{self, Message}; -use crate::util::{internal, profile}; -use anyhow::{bail, Context as _}; -use cargo_platform::Cfg; -use cargo_util::paths; -use std::collections::hash_map::{Entry, HashMap}; -use std::collections::{BTreeSet, HashSet}; -use std::path::{Path, PathBuf}; -use std::str; -use std::sync::{Arc, Mutex}; - -const CARGO_WARNING: &str = "cargo:warning="; - -/// Contains the parsed output of a custom build script. -#[derive(Clone, Debug, Hash, Default)] -pub struct BuildOutput { - /// Paths to pass to rustc with the `-L` flag. - pub library_paths: Vec, - /// Names and link kinds of libraries, suitable for the `-l` flag. - pub library_links: Vec, - /// Linker arguments suitable to be passed to `-C link-arg=` - pub linker_args: Vec<(LinkType, String)>, - /// Various `--cfg` flags to pass to the compiler. - pub cfgs: Vec, - /// Additional environment variables to run the compiler with. - pub env: Vec<(String, String)>, - /// Metadata to pass to the immediate dependencies. - pub metadata: Vec<(String, String)>, - /// Paths to trigger a rerun of this build script. - /// May be absolute or relative paths (relative to package root). - pub rerun_if_changed: Vec, - /// Environment variables which, when changed, will cause a rebuild. - pub rerun_if_env_changed: Vec, - /// Warnings generated by this build. - /// - /// These are only displayed if this is a "local" package, `-vv` is used, - /// or there is a build error for any target in this package. - pub warnings: Vec, -} - -/// Map of packages to build script output. -/// -/// This initially starts out as empty. Overridden build scripts get -/// inserted during `build_map`. The rest of the entries are added -/// immediately after each build script runs. -/// -/// The `Metadata` is the unique metadata hash for the RunCustomBuild Unit of -/// the package. It needs a unique key, since the build script can be run -/// multiple times with different profiles or features. We can't embed a -/// `Unit` because this structure needs to be shareable between threads. 
-#[derive(Default)] -pub struct BuildScriptOutputs { - outputs: HashMap, -} - -/// Linking information for a `Unit`. -/// -/// See `build_map` for more details. -#[derive(Default)] -pub struct BuildScripts { - /// List of build script outputs this Unit needs to include for linking. Each - /// element is an index into `BuildScriptOutputs`. - /// - /// Cargo will use this `to_link` vector to add `-L` flags to compiles as we - /// propagate them upwards towards the final build. Note, however, that we - /// need to preserve the ordering of `to_link` to be topologically sorted. - /// This will ensure that build scripts which print their paths properly will - /// correctly pick up the files they generated (if there are duplicates - /// elsewhere). - /// - /// To preserve this ordering, the (id, metadata) is stored in two places, once - /// in the `Vec` and once in `seen_to_link` for a fast lookup. We maintain - /// this as we're building interactively below to ensure that the memory - /// usage here doesn't blow up too much. - /// - /// For more information, see #2354. - pub to_link: Vec<(PackageId, Metadata)>, - /// This is only used while constructing `to_link` to avoid duplicates. - seen_to_link: HashSet<(PackageId, Metadata)>, - /// Host-only dependencies that have build scripts. Each element is an - /// index into `BuildScriptOutputs`. - /// - /// This is the set of transitive dependencies that are host-only - /// (proc-macro, plugin, build-dependency) that contain a build script. - /// Any `BuildOutput::library_paths` path relative to `target` will be - /// added to LD_LIBRARY_PATH so that the compiler can find any dynamic - /// libraries a build script may have generated. - pub plugins: BTreeSet<(PackageId, Metadata)>, -} - -/// Dependency information as declared by a build script. -#[derive(Debug)] -pub struct BuildDeps { - /// Absolute path to the file in the target directory that stores the - /// output of the build script. - pub build_script_output: PathBuf, - /// Files that trigger a rebuild if they change. - pub rerun_if_changed: Vec, - /// Environment variables that trigger a rebuild if they change. - pub rerun_if_env_changed: Vec, -} - -/// Prepares a `Work` that executes the target as a custom build script. -pub fn prepare(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { - let _p = profile::start(format!( - "build script prepare: {}/{}", - unit.pkg, - unit.target.name() - )); - - let metadata = cx.get_run_build_script_metadata(unit); - if cx - .build_script_outputs - .lock() - .unwrap() - .contains_key(metadata) - { - // The output is already set, thus the build script is overridden. 
- fingerprint::prepare_target(cx, unit, false) - } else { - build_work(cx, unit) - } -} - -fn emit_build_output( - state: &JobState<'_, '_>, - output: &BuildOutput, - out_dir: &Path, - package_id: PackageId, -) -> CargoResult<()> { - let library_paths = output - .library_paths - .iter() - .map(|l| l.display().to_string()) - .collect::>(); - - let msg = machine_message::BuildScript { - package_id, - linked_libs: &output.library_links, - linked_paths: &library_paths, - cfgs: &output.cfgs, - env: &output.env, - out_dir, - } - .to_json_string(); - state.stdout(msg)?; - Ok(()) -} - -fn build_work(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { - assert!(unit.mode.is_run_custom_build()); - let bcx = &cx.bcx; - let dependencies = cx.unit_deps(unit); - let build_script_unit = dependencies - .iter() - .find(|d| !d.unit.mode.is_run_custom_build() && d.unit.target.is_custom_build()) - .map(|d| &d.unit) - .expect("running a script not depending on an actual script"); - let script_dir = cx.files().build_script_dir(build_script_unit); - let script_out_dir = cx.files().build_script_out_dir(unit); - let script_run_dir = cx.files().build_script_run_dir(unit); - let build_plan = bcx.build_config.build_plan; - let invocation_name = unit.buildkey(); - - if let Some(deps) = unit.pkg.manifest().metabuild() { - prepare_metabuild(cx, build_script_unit, deps)?; - } - - // Building the command to execute - let to_exec = script_dir.join(unit.target.name()); - - // Start preparing the process to execute, starting out with some - // environment variables. Note that the profile-related environment - // variables are not set with this the build script's profile but rather the - // package's library profile. - // NOTE: if you add any profile flags, be sure to update - // `Profiles::get_profile_run_custom_build` so that those flags get - // carried over. - let to_exec = to_exec.into_os_string(); - let mut cmd = cx.compilation.host_process(to_exec, &unit.pkg)?; - let debug = unit.profile.debuginfo.unwrap_or(0) != 0; - cmd.env("OUT_DIR", &script_out_dir) - .env("CARGO_MANIFEST_DIR", unit.pkg.root()) - .env("NUM_JOBS", &bcx.jobs().to_string()) - .env("TARGET", bcx.target_data.short_name(&unit.kind)) - .env("DEBUG", debug.to_string()) - .env("OPT_LEVEL", &unit.profile.opt_level.to_string()) - .env( - "PROFILE", - match unit.profile.root { - ProfileRoot::Release => "release", - ProfileRoot::Debug => "debug", - }, - ) - .env("HOST", &bcx.host_triple()) - .env("RUSTC", &bcx.rustc().path) - .env("RUSTDOC", &*bcx.config.rustdoc()?) - .inherit_jobserver(&cx.jobserver); - - if let Some(linker) = &bcx.target_data.target_config(unit.kind).linker { - cmd.env( - "RUSTC_LINKER", - linker.val.clone().resolve_program(bcx.config), - ); - } - - if let Some(links) = unit.pkg.manifest().links() { - cmd.env("CARGO_MANIFEST_LINKS", links); - } - - // Be sure to pass along all enabled features for this package, this is the - // last piece of statically known information that we have. - for feat in &unit.features { - cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1"); - } - - let mut cfg_map = HashMap::new(); - for cfg in bcx.target_data.cfg(unit.kind) { - match *cfg { - Cfg::Name(ref n) => { - cfg_map.insert(n.clone(), None); - } - Cfg::KeyPair(ref k, ref v) => { - if let Some(ref mut values) = - *cfg_map.entry(k.clone()).or_insert_with(|| Some(Vec::new())) - { - values.push(v.clone()) - } - } - } - } - for (k, v) in cfg_map { - if k == "debug_assertions" { - // This cfg is always true and misleading, so avoid setting it. 
- // That is because Cargo queries rustc without any profile settings. - continue; - } - let k = format!("CARGO_CFG_{}", super::envify(&k)); - match v { - Some(list) => { - cmd.env(&k, list.join(",")); - } - None => { - cmd.env(&k, ""); - } - } - } - - // Also inform the build script of the rustc compiler context. - if let Some(wrapper) = bcx.rustc().wrapper.as_ref() { - cmd.env("RUSTC_WRAPPER", wrapper); - } else { - cmd.env_remove("RUSTC_WRAPPER"); - } - cmd.env_remove("RUSTC_WORKSPACE_WRAPPER"); - if cx.bcx.ws.is_member(&unit.pkg) { - if let Some(wrapper) = bcx.rustc().workspace_wrapper.as_ref() { - cmd.env("RUSTC_WORKSPACE_WRAPPER", wrapper); - } - } - cmd.env( - "CARGO_ENCODED_RUSTFLAGS", - bcx.rustflags_args(unit).join("\x1f"), - ); - cmd.env_remove("RUSTFLAGS"); - - // Gather the set of native dependencies that this package has along with - // some other variables to close over. - // - // This information will be used at build-time later on to figure out which - // sorts of variables need to be discovered at that time. - let lib_deps = dependencies - .iter() - .filter_map(|dep| { - if dep.unit.mode.is_run_custom_build() { - let dep_metadata = cx.get_run_build_script_metadata(&dep.unit); - Some(( - dep.unit.pkg.manifest().links().unwrap().to_string(), - dep.unit.pkg.package_id(), - dep_metadata, - )) - } else { - None - } - }) - .collect::>(); - let library_name = unit.pkg.library().map(|t| t.crate_name()); - let pkg_descr = unit.pkg.to_string(); - let build_script_outputs = Arc::clone(&cx.build_script_outputs); - let id = unit.pkg.package_id(); - let output_file = script_run_dir.join("output"); - let err_file = script_run_dir.join("stderr"); - let root_output_file = script_run_dir.join("root-output"); - let host_target_root = cx.files().host_dest().to_path_buf(); - let all = ( - id, - library_name.clone(), - pkg_descr.clone(), - Arc::clone(&build_script_outputs), - output_file.clone(), - script_out_dir.clone(), - ); - let build_scripts = cx.build_scripts.get(unit).cloned(); - let json_messages = bcx.build_config.emit_json(); - let extra_verbose = bcx.config.extra_verbose(); - let (prev_output, prev_script_out_dir) = prev_build_output(cx, unit); - let metadata_hash = cx.get_run_build_script_metadata(unit); - - paths::create_dir_all(&script_dir)?; - paths::create_dir_all(&script_out_dir)?; - - let nightly_features_allowed = cx.bcx.config.nightly_features_allowed; - let targets: Vec = unit.pkg.targets().to_vec(); - // Need a separate copy for the fresh closure. - let targets_fresh = targets.clone(); - - // Prepare the unit of "dirty work" which will actually run the custom build - // command. - // - // Note that this has to do some extra work just before running the command - // to determine extra environment variables and such. - let dirty = Work::new(move |state| { - // Make sure that OUT_DIR exists. - // - // If we have an old build directory, then just move it into place, - // otherwise create it! - paths::create_dir_all(&script_out_dir) - .with_context(|| "failed to create script output directory for build command")?; - - // For all our native lib dependencies, pick up their metadata to pass - // along to this custom build command. We're also careful to augment our - // dynamic library search path in case the build script depended on any - // native dynamic libraries. 
- if !build_plan { - let build_script_outputs = build_script_outputs.lock().unwrap(); - for (name, dep_id, dep_metadata) in lib_deps { - let script_output = build_script_outputs.get(dep_metadata).ok_or_else(|| { - internal(format!( - "failed to locate build state for env vars: {}/{}", - dep_id, dep_metadata - )) - })?; - let data = &script_output.metadata; - for &(ref key, ref value) in data.iter() { - cmd.env( - &format!("DEP_{}_{}", super::envify(&name), super::envify(key)), - value, - ); - } - } - if let Some(build_scripts) = build_scripts { - super::add_plugin_deps( - &mut cmd, - &build_script_outputs, - &build_scripts, - &host_target_root, - )?; - } - } - - if build_plan { - state.build_plan(invocation_name, cmd.clone(), Arc::new(Vec::new())); - return Ok(()); - } - - // And now finally, run the build command itself! - state.running(&cmd); - let timestamp = paths::set_invocation_time(&script_run_dir)?; - let prefix = format!("[{} {}] ", id.name(), id.version()); - let mut warnings_in_case_of_panic = Vec::new(); - let output = cmd - .exec_with_streaming( - &mut |stdout| { - if let Some(warning) = stdout.strip_prefix(CARGO_WARNING) { - warnings_in_case_of_panic.push(warning.to_owned()); - } - if extra_verbose { - state.stdout(format!("{}{}", prefix, stdout))?; - } - Ok(()) - }, - &mut |stderr| { - if extra_verbose { - state.stderr(format!("{}{}", prefix, stderr))?; - } - Ok(()) - }, - true, - ) - .with_context(|| format!("failed to run custom build command for `{}`", pkg_descr)); - - if let Err(error) = output { - insert_warnings_in_build_outputs( - build_script_outputs, - id, - metadata_hash, - warnings_in_case_of_panic, - ); - return Err(error); - } - - let output = output.unwrap(); - - // After the build command has finished running, we need to be sure to - // remember all of its output so we can later discover precisely what it - // was, even if we don't run the build command again (due to freshness). - // - // This is also the location where we provide feedback into the build - // state informing what variables were discovered via our script as - // well. - paths::write(&output_file, &output.stdout)?; - // This mtime shift allows Cargo to detect if a source file was - // modified in the middle of the build. - paths::set_file_time_no_err(output_file, timestamp); - paths::write(&err_file, &output.stderr)?; - paths::write(&root_output_file, paths::path2bytes(&script_out_dir)?)?; - let parsed_output = BuildOutput::parse( - &output.stdout, - library_name, - &pkg_descr, - &script_out_dir, - &script_out_dir, - nightly_features_allowed, - &targets, - )?; - - if json_messages { - emit_build_output(state, &parsed_output, script_out_dir.as_path(), id)?; - } - build_script_outputs - .lock() - .unwrap() - .insert(id, metadata_hash, parsed_output); - Ok(()) - }); - - // Now that we've prepared our work-to-do, we need to prepare the fresh work - // itself to run when we actually end up just discarding what we calculated - // above. 
- let fresh = Work::new(move |state| { - let (id, library_name, pkg_descr, build_script_outputs, output_file, script_out_dir) = all; - let output = match prev_output { - Some(output) => output, - None => BuildOutput::parse_file( - &output_file, - library_name, - &pkg_descr, - &prev_script_out_dir, - &script_out_dir, - nightly_features_allowed, - &targets_fresh, - )?, - }; - - if json_messages { - emit_build_output(state, &output, script_out_dir.as_path(), id)?; - } - - build_script_outputs - .lock() - .unwrap() - .insert(id, metadata_hash, output); - Ok(()) - }); - - let mut job = if cx.bcx.build_config.build_plan { - Job::new_dirty(Work::noop()) - } else { - fingerprint::prepare_target(cx, unit, false)? - }; - if job.freshness() == Freshness::Dirty { - job.before(dirty); - } else { - job.before(fresh); - } - Ok(job) -} - -fn insert_warnings_in_build_outputs( - build_script_outputs: Arc>, - id: PackageId, - metadata_hash: Metadata, - warnings: Vec, -) { - let build_output_with_only_warnings = BuildOutput { - warnings, - ..BuildOutput::default() - }; - build_script_outputs - .lock() - .unwrap() - .insert(id, metadata_hash, build_output_with_only_warnings); -} - -impl BuildOutput { - pub fn parse_file( - path: &Path, - library_name: Option, - pkg_descr: &str, - script_out_dir_when_generated: &Path, - script_out_dir: &Path, - nightly_features_allowed: bool, - targets: &[Target], - ) -> CargoResult { - let contents = paths::read_bytes(path)?; - BuildOutput::parse( - &contents, - library_name, - pkg_descr, - script_out_dir_when_generated, - script_out_dir, - nightly_features_allowed, - targets, - ) - } - - // Parses the output of a script. - // The `pkg_descr` is used for error messages. - // The `library_name` is used for determining if RUSTC_BOOTSTRAP should be allowed. - pub fn parse( - input: &[u8], - // Takes String instead of InternedString so passing `unit.pkg.name()` will give a compile error. - library_name: Option, - pkg_descr: &str, - script_out_dir_when_generated: &Path, - script_out_dir: &Path, - nightly_features_allowed: bool, - targets: &[Target], - ) -> CargoResult { - let mut library_paths = Vec::new(); - let mut library_links = Vec::new(); - let mut linker_args = Vec::new(); - let mut cfgs = Vec::new(); - let mut env = Vec::new(); - let mut metadata = Vec::new(); - let mut rerun_if_changed = Vec::new(); - let mut rerun_if_env_changed = Vec::new(); - let mut warnings = Vec::new(); - let whence = format!("build script of `{}`", pkg_descr); - - for line in input.split(|b| *b == b'\n') { - let line = match str::from_utf8(line) { - Ok(line) => line.trim(), - Err(..) => continue, - }; - let mut iter = line.splitn(2, ':'); - if iter.next() != Some("cargo") { - // skip this line since it doesn't start with "cargo:" - continue; - } - let data = match iter.next() { - Some(val) => val, - None => continue, - }; - - // getting the `key=value` part of the line - let mut iter = data.splitn(2, '='); - let key = iter.next(); - let value = iter.next(); - let (key, value) = match (key, value) { - (Some(a), Some(b)) => (a, b.trim_end()), - // Line started with `cargo:` but didn't match `key=value`. - _ => bail!("invalid output in {}: `{}`\n\ - Expected a line with `cargo:key=value` with an `=` character, \ - but none was found.\n\ - See https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script \ - for more information about build script outputs.", whence, line), - }; - - // This will rewrite paths if the target directory has been moved. 
- let value = value.replace( - script_out_dir_when_generated.to_str().unwrap(), - script_out_dir.to_str().unwrap(), - ); - - // Keep in sync with TargetConfig::parse_links_overrides. - match key { - "rustc-flags" => { - let (paths, links) = BuildOutput::parse_rustc_flags(&value, &whence)?; - library_links.extend(links.into_iter()); - library_paths.extend(paths.into_iter()); - } - "rustc-link-lib" => library_links.push(value.to_string()), - "rustc-link-search" => library_paths.push(PathBuf::from(value)), - "rustc-link-arg-cdylib" | "rustc-cdylib-link-arg" => { - if !targets.iter().any(|target| target.is_cdylib()) { - warnings.push(format!( - "cargo:{} was specified in the build script of {}, \ - but that package does not contain a cdylib target\n\ - \n\ - Allowing this was an unintended change in the 1.50 \ - release, and may become an error in the future. \ - For more information, see \ - .", - key, pkg_descr - )); - } - linker_args.push((LinkType::Cdylib, value)) - } - "rustc-link-arg-bins" => { - if !targets.iter().any(|target| target.is_bin()) { - bail!( - "invalid instruction `cargo:{}` from {}\n\ - The package {} does not have a bin target.", - key, - whence, - pkg_descr - ); - } - linker_args.push((LinkType::Bin, value)); - } - "rustc-link-arg-bin" => { - let mut parts = value.splitn(2, '='); - let bin_name = parts.next().unwrap().to_string(); - let arg = parts.next().ok_or_else(|| { - anyhow::format_err!( - "invalid instruction `cargo:{}={}` from {}\n\ - The instruction should have the form cargo:{}=BIN=ARG", - key, - value, - whence, - key - ) - })?; - if !targets - .iter() - .any(|target| target.is_bin() && target.name() == bin_name) - { - bail!( - "invalid instruction `cargo:{}` from {}\n\ - The package {} does not have a bin target with the name `{}`.", - key, - whence, - pkg_descr, - bin_name - ); - } - linker_args.push((LinkType::SingleBin(bin_name), arg.to_string())); - } - "rustc-link-arg" => { - linker_args.push((LinkType::All, value)); - } - "rustc-cfg" => cfgs.push(value.to_string()), - "rustc-env" => { - let (key, val) = BuildOutput::parse_rustc_env(&value, &whence)?; - // Build scripts aren't allowed to set RUSTC_BOOTSTRAP. - // See https://github.com/rust-lang/cargo/issues/7088. - if key == "RUSTC_BOOTSTRAP" { - // If RUSTC_BOOTSTRAP is already set, the user of Cargo knows about - // bootstrap and still wants to override the channel. Give them a way to do - // so, but still emit a warning that the current crate shouldn't be trying - // to set RUSTC_BOOTSTRAP. - // If this is a nightly build, setting RUSTC_BOOTSTRAP wouldn't affect the - // behavior, so still only give a warning. - // NOTE: cargo only allows nightly features on RUSTC_BOOTSTRAP=1, but we - // want setting any value of RUSTC_BOOTSTRAP to downgrade this to a warning - // (so that `RUSTC_BOOTSTRAP=library_name` will work) - let rustc_bootstrap_allows = |name: Option<&str>| { - let name = match name { - // as of 2021, no binaries on crates.io use RUSTC_BOOTSTRAP, so - // fine-grained opt-outs aren't needed. end-users can always use - // RUSTC_BOOTSTRAP=1 from the top-level if it's really a problem. 
- None => return false, - Some(n) => n, - }; - std::env::var("RUSTC_BOOTSTRAP") - .map_or(false, |var| var.split(',').any(|s| s == name)) - }; - if nightly_features_allowed - || rustc_bootstrap_allows(library_name.as_deref()) - { - warnings.push(format!("Cannot set `RUSTC_BOOTSTRAP={}` from {}.\n\ - note: Crates cannot set `RUSTC_BOOTSTRAP` themselves, as doing so would subvert the stability guarantees of Rust for your project.", - val, whence - )); - } else { - // Setting RUSTC_BOOTSTRAP would change the behavior of the crate. - // Abort with an error. - bail!("Cannot set `RUSTC_BOOTSTRAP={}` from {}.\n\ - note: Crates cannot set `RUSTC_BOOTSTRAP` themselves, as doing so would subvert the stability guarantees of Rust for your project.\n\ - help: If you're sure you want to do this in your project, set the environment variable `RUSTC_BOOTSTRAP={}` before running cargo instead.", - val, - whence, - library_name.as_deref().unwrap_or("1"), - ); - } - } else { - env.push((key, val)); - } - } - "warning" => warnings.push(value.to_string()), - "rerun-if-changed" => rerun_if_changed.push(PathBuf::from(value)), - "rerun-if-env-changed" => rerun_if_env_changed.push(value.to_string()), - _ => metadata.push((key.to_string(), value.to_string())), - } - } - - Ok(BuildOutput { - library_paths, - library_links, - linker_args, - cfgs, - env, - metadata, - rerun_if_changed, - rerun_if_env_changed, - warnings, - }) - } - - pub fn parse_rustc_flags( - value: &str, - whence: &str, - ) -> CargoResult<(Vec, Vec)> { - let value = value.trim(); - let mut flags_iter = value - .split(|c: char| c.is_whitespace()) - .filter(|w| w.chars().any(|c| !c.is_whitespace())); - let (mut library_paths, mut library_links) = (Vec::new(), Vec::new()); - - while let Some(flag) = flags_iter.next() { - if flag.starts_with("-l") || flag.starts_with("-L") { - // Check if this flag has no space before the value as is - // common with tools like pkg-config - // e.g. -L/some/dir/local/lib or -licui18n - let (flag, mut value) = flag.split_at(2); - if value.is_empty() { - value = match flags_iter.next() { - Some(v) => v, - None => bail! { - "Flag in rustc-flags has no value in {}: {}", - whence, - value - }, - } - } - - match flag { - "-l" => library_links.push(value.to_string()), - "-L" => library_paths.push(PathBuf::from(value)), - - // This was already checked above - _ => unreachable!(), - }; - } else { - bail!( - "Only `-l` and `-L` flags are allowed in {}: `{}`", - whence, - value - ) - } - } - Ok((library_paths, library_links)) - } - - pub fn parse_rustc_env(value: &str, whence: &str) -> CargoResult<(String, String)> { - let mut iter = value.splitn(2, '='); - let name = iter.next(); - let val = iter.next(); - match (name, val) { - (Some(n), Some(v)) => Ok((n.to_owned(), v.to_owned())), - _ => bail!("Variable rustc-env has no value in {}: {}", whence, value), - } - } -} - -fn prepare_metabuild(cx: &Context<'_, '_>, unit: &Unit, deps: &[String]) -> CargoResult<()> { - let mut output = Vec::new(); - let available_deps = cx.unit_deps(unit); - // Filter out optional dependencies, and look up the actual lib name. 
- let meta_deps: Vec<_> = deps - .iter() - .filter_map(|name| { - available_deps - .iter() - .find(|d| d.unit.pkg.name().as_str() == name.as_str()) - .map(|d| d.unit.target.crate_name()) - }) - .collect(); - for dep in &meta_deps { - output.push(format!("use {};\n", dep)); - } - output.push("fn main() {\n".to_string()); - for dep in &meta_deps { - output.push(format!(" {}::metabuild();\n", dep)); - } - output.push("}\n".to_string()); - let output = output.join(""); - let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir()); - paths::create_dir_all(path.parent().unwrap())?; - paths::write_if_changed(path, &output)?; - Ok(()) -} - -impl BuildDeps { - pub fn new(output_file: &Path, output: Option<&BuildOutput>) -> BuildDeps { - BuildDeps { - build_script_output: output_file.to_path_buf(), - rerun_if_changed: output - .map(|p| &p.rerun_if_changed) - .cloned() - .unwrap_or_default(), - rerun_if_env_changed: output - .map(|p| &p.rerun_if_env_changed) - .cloned() - .unwrap_or_default(), - } - } -} - -/// Computes several maps in `Context`: -/// - `build_scripts`: A map that tracks which build scripts each package -/// depends on. -/// - `build_explicit_deps`: Dependency statements emitted by build scripts -/// from a previous run. -/// - `build_script_outputs`: Pre-populates this with any overridden build -/// scripts. -/// -/// The important one here is `build_scripts`, which for each `(package, -/// metadata)` stores a `BuildScripts` object which contains a list of -/// dependencies with build scripts that the unit should consider when -/// linking. For example this lists all dependencies' `-L` flags which need to -/// be propagated transitively. -/// -/// The given set of units to this function is the initial set of -/// targets/profiles which are being built. -pub fn build_map(cx: &mut Context<'_, '_>) -> CargoResult<()> { - let mut ret = HashMap::new(); - for unit in &cx.bcx.roots { - build(&mut ret, cx, unit)?; - } - cx.build_scripts - .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v)))); - return Ok(()); - - // Recursive function to build up the map we're constructing. This function - // memoizes all of its return values as it goes along. - fn build<'a>( - out: &'a mut HashMap, - cx: &mut Context<'_, '_>, - unit: &Unit, - ) -> CargoResult<&'a BuildScripts> { - // Do a quick pre-flight check to see if we've already calculated the - // set of dependencies. - if out.contains_key(unit) { - return Ok(&out[unit]); - } - - // If there is a build script override, pre-fill the build output. - if unit.mode.is_run_custom_build() { - if let Some(links) = unit.pkg.manifest().links() { - if let Some(output) = cx.bcx.target_data.script_override(links, unit.kind) { - let metadata = cx.get_run_build_script_metadata(unit); - cx.build_script_outputs.lock().unwrap().insert( - unit.pkg.package_id(), - metadata, - output.clone(), - ); - } - } - } - - let mut ret = BuildScripts::default(); - - // If a package has a build script, add itself as something to inspect for linking. - if !unit.target.is_custom_build() && unit.pkg.has_custom_build() { - let script_meta = cx - .find_build_script_metadata(unit) - .expect("has_custom_build should have RunCustomBuild"); - add_to_link(&mut ret, unit.pkg.package_id(), script_meta); - } - - // Load any dependency declarations from a previous run. 
- if unit.mode.is_run_custom_build() { - parse_previous_explicit_deps(cx, unit); - } - - // We want to invoke the compiler deterministically to be cache-friendly - // to rustc invocation caching schemes, so be sure to generate the same - // set of build script dependency orderings via sorting the targets that - // come out of the `Context`. - let mut dependencies: Vec = - cx.unit_deps(unit).iter().map(|d| d.unit.clone()).collect(); - dependencies.sort_by_key(|u| u.pkg.package_id()); - - for dep_unit in dependencies.iter() { - let dep_scripts = build(out, cx, dep_unit)?; - - if dep_unit.target.for_host() { - ret.plugins.extend(dep_scripts.to_link.iter().cloned()); - } else if dep_unit.target.is_linkable() { - for &(pkg, metadata) in dep_scripts.to_link.iter() { - add_to_link(&mut ret, pkg, metadata); - } - } - } - - match out.entry(unit.clone()) { - Entry::Vacant(entry) => Ok(entry.insert(ret)), - Entry::Occupied(_) => panic!("cyclic dependencies in `build_map`"), - } - } - - // When adding an entry to 'to_link' we only actually push it on if the - // script hasn't seen it yet (e.g., we don't push on duplicates). - fn add_to_link(scripts: &mut BuildScripts, pkg: PackageId, metadata: Metadata) { - if scripts.seen_to_link.insert((pkg, metadata)) { - scripts.to_link.push((pkg, metadata)); - } - } - - fn parse_previous_explicit_deps(cx: &mut Context<'_, '_>, unit: &Unit) { - let script_run_dir = cx.files().build_script_run_dir(unit); - let output_file = script_run_dir.join("output"); - let (prev_output, _) = prev_build_output(cx, unit); - let deps = BuildDeps::new(&output_file, prev_output.as_ref()); - cx.build_explicit_deps.insert(unit.clone(), deps); - } -} - -/// Returns the previous parsed `BuildOutput`, if any, from a previous -/// execution. -/// -/// Also returns the directory containing the output, typically used later in -/// processing. -fn prev_build_output(cx: &mut Context<'_, '_>, unit: &Unit) -> (Option, PathBuf) { - let script_out_dir = cx.files().build_script_out_dir(unit); - let script_run_dir = cx.files().build_script_run_dir(unit); - let root_output_file = script_run_dir.join("root-output"); - let output_file = script_run_dir.join("output"); - - let prev_script_out_dir = paths::read_bytes(&root_output_file) - .and_then(|bytes| paths::bytes2path(&bytes)) - .unwrap_or_else(|_| script_out_dir.clone()); - - ( - BuildOutput::parse_file( - &output_file, - unit.pkg.library().map(|t| t.crate_name()), - &unit.pkg.to_string(), - &prev_script_out_dir, - &script_out_dir, - cx.bcx.config.nightly_features_allowed, - unit.pkg.targets(), - ) - .ok(), - prev_script_out_dir, - ) -} - -impl BuildScriptOutputs { - /// Inserts a new entry into the map. - fn insert(&mut self, pkg_id: PackageId, metadata: Metadata, parsed_output: BuildOutput) { - match self.outputs.entry(metadata) { - Entry::Vacant(entry) => { - entry.insert(parsed_output); - } - Entry::Occupied(entry) => panic!( - "build script output collision for {}/{}\n\ - old={:?}\nnew={:?}", - pkg_id, - metadata, - entry.get(), - parsed_output - ), - } - } - - /// Returns `true` if the given key already exists. - fn contains_key(&self, metadata: Metadata) -> bool { - self.outputs.contains_key(&metadata) - } - - /// Gets the build output for the given key. - pub fn get(&self, meta: Metadata) -> Option<&BuildOutput> { - self.outputs.get(&meta) - } - - /// Returns an iterator over all entries. 
- pub fn iter(&self) -> impl Iterator { - self.outputs.iter() - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/fingerprint.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/fingerprint.rs deleted file mode 100644 index e3aaf9c76..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/fingerprint.rs +++ /dev/null @@ -1,2063 +0,0 @@ -//! # Fingerprints -//! -//! This module implements change-tracking so that Cargo can know whether or -//! not something needs to be recompiled. A Cargo `Unit` can be either "dirty" -//! (needs to be recompiled) or "fresh" (it does not need to be recompiled). -//! There are several mechanisms that influence a Unit's freshness: -//! -//! - The `Fingerprint` is a hash, saved to the filesystem in the -//! `.fingerprint` directory, that tracks information about the Unit. If the -//! fingerprint is missing (such as the first time the unit is being -//! compiled), then the unit is dirty. If any of the fingerprint fields -//! change (like the name of the source file), then the Unit is considered -//! dirty. -//! -//! The `Fingerprint` also tracks the fingerprints of all its dependencies, -//! so a change in a dependency will propagate the "dirty" status up. -//! -//! - Filesystem mtime tracking is also used to check if a unit is dirty. -//! See the section below on "Mtime comparison" for more details. There -//! are essentially two parts to mtime tracking: -//! -//! 1. The mtime of a Unit's output files is compared to the mtime of all -//! its dependencies' output file mtimes (see `check_filesystem`). If any -//! output is missing, or is older than a dependency's output, then the -//! unit is dirty. -//! 2. The mtime of a Unit's source files is compared to the mtime of its -//! dep-info file in the fingerprint directory (see `find_stale_file`). -//! The dep-info file is used as an anchor to know when the last build of -//! the unit was done. See the "dep-info files" section below for more -//! details. If any input files are missing, or are newer than the -//! dep-info, then the unit is dirty. -//! -//! Note: Fingerprinting is not a perfect solution. Filesystem mtime tracking -//! is notoriously imprecise and problematic. Only a small part of the -//! environment is captured. This is a balance of performance, simplicity, and -//! completeness. Sandboxing, hashing file contents, tracking every file -//! access, environment variable, and network operation would ensure more -//! reliable and reproducible builds at the cost of being complex, slow, and -//! platform-dependent. -//! -//! ## Fingerprints and Metadata -//! -//! The `Metadata` hash is a hash added to the output filenames to isolate -//! each unit. See the documentation in the `compilation_files` module for -//! more details. NOTE: Not all output files are isolated via filename hashes -//! (like dylibs). The fingerprint directory uses a hash, but sometimes units -//! share the same fingerprint directory (when they don't have Metadata) so -//! care should be taken to handle this! -//! -//! Fingerprints and Metadata are similar, and track some of the same things. -//! The Metadata contains information that is required to keep Units separate. -//! The Fingerprint includes additional information that should cause a -//! recompile, but it is desired to reuse the same filenames. A comparison -//! of what is tracked: -//! -//! Value | Fingerprint | Metadata -//! -------------------------------------------|-------------|---------- -//! 
rustc | โœ“ | โœ“ -//! Profile | โœ“ | โœ“ -//! `cargo rustc` extra args | โœ“ | โœ“ -//! CompileMode | โœ“ | โœ“ -//! Target Name | โœ“ | โœ“ -//! TargetKind (bin/lib/etc.) | โœ“ | โœ“ -//! Enabled Features | โœ“ | โœ“ -//! Immediate dependencyโ€™s hashes | โœ“[^1] | โœ“ -//! CompileKind (host/target) | โœ“ | โœ“ -//! __CARGO_DEFAULT_LIB_METADATA[^4] | | โœ“ -//! package_id | | โœ“ -//! authors, description, homepage, repo | โœ“ | -//! Target src path relative to ws | โœ“ | -//! Target flags (test/bench/for_host/edition) | โœ“ | -//! -C incremental=โ€ฆ flag | โœ“ | -//! mtime of sources | โœ“[^3] | -//! RUSTFLAGS/RUSTDOCFLAGS | โœ“ | -//! LTO flags | โœ“ | โœ“ -//! config settings[^5] | โœ“ | -//! is_std | | โœ“ -//! -//! [^1]: Build script and bin dependencies are not included. -//! -//! [^3]: See below for details on mtime tracking. -//! -//! [^4]: `__CARGO_DEFAULT_LIB_METADATA` is set by rustbuild to embed the -//! release channel (bootstrap/stable/beta/nightly) in libstd. -//! -//! [^5]: Config settings that are not otherwise captured anywhere else. -//! Currently, this is only `doc.extern-map`. -//! -//! When deciding what should go in the Metadata vs the Fingerprint, consider -//! that some files (like dylibs) do not have a hash in their filename. Thus, -//! if a value changes, only the fingerprint will detect the change (consider, -//! for example, swapping between different features). Fields that are only in -//! Metadata generally aren't relevant to the fingerprint because they -//! fundamentally change the output (like target vs host changes the directory -//! where it is emitted). -//! -//! ## Fingerprint files -//! -//! Fingerprint information is stored in the -//! `target/{debug,release}/.fingerprint/` directory. Each Unit is stored in a -//! separate directory. Each Unit directory contains: -//! -//! - A file with a 16 hex-digit hash. This is the Fingerprint hash, used for -//! quick loading and comparison. -//! - A `.json` file that contains details about the Fingerprint. This is only -//! used to log details about *why* a fingerprint is considered dirty. -//! `CARGO_LOG=cargo::core::compiler::fingerprint=trace cargo build` can be -//! used to display this log information. -//! - A "dep-info" file which is a translation of rustc's `*.d` dep-info files -//! to a Cargo-specific format that tweaks file names and is optimized for -//! reading quickly. -//! - An `invoked.timestamp` file whose filesystem mtime is updated every time -//! the Unit is built. This is used for capturing the time when the build -//! starts, to detect if files are changed in the middle of the build. See -//! below for more details. -//! -//! Note that some units are a little different. A Unit for *running* a build -//! script or for `rustdoc` does not have a dep-info file (it's not -//! applicable). Build script `invoked.timestamp` files are in the build -//! output directory. -//! -//! ## Fingerprint calculation -//! -//! After the list of Units has been calculated, the Units are added to the -//! `JobQueue`. As each one is added, the fingerprint is calculated, and the -//! dirty/fresh status is recorded. A closure is used to update the fingerprint -//! on-disk when the Unit successfully finishes. The closure will recompute the -//! Fingerprint based on the updated information. If the Unit fails to compile, -//! the fingerprint is not updated. -//! -//! Fingerprints are cached in the `Context`. This makes computing -//! Fingerprints faster, but also is necessary for properly updating -//! 
dependency information. Since a Fingerprint includes the Fingerprints of -//! all dependencies, when it is updated, by using `Arc` clones, it -//! automatically picks up the updates to its dependencies. -//! -//! ### dep-info files -//! -//! Cargo passes the `--emit=dep-info` flag to `rustc` so that `rustc` will -//! generate a "dep info" file (with the `.d` extension). This is a -//! Makefile-like syntax that includes all of the source files used to build -//! the crate. This file is used by Cargo to know which files to check to see -//! if the crate will need to be rebuilt. -//! -//! After `rustc` exits successfully, Cargo will read the dep info file and -//! translate it into a binary format that is stored in the fingerprint -//! directory (`translate_dep_info`). The mtime of the fingerprint dep-info -//! file itself is used as the reference for comparing the source files to -//! determine if any of the source files have been modified (see below for -//! more detail). Note that Cargo parses the special `# env-var:...` comments in -//! dep-info files to learn about environment variables that the rustc compile -//! depends on. Cargo then later uses this to trigger a recompile if a -//! referenced env var changes (even if the source didn't change). -//! -//! There is also a third dep-info file. Cargo will extend the file created by -//! rustc with some additional information and saves this into the output -//! directory. This is intended for build system integration. See the -//! `output_depinfo` module for more detail. -//! -//! #### -Zbinary-dep-depinfo -//! -//! `rustc` has an experimental flag `-Zbinary-dep-depinfo`. This causes -//! `rustc` to include binary files (like rlibs) in the dep-info file. This is -//! primarily to support rustc development, so that Cargo can check the -//! implicit dependency to the standard library (which lives in the sysroot). -//! We want Cargo to recompile whenever the standard library rlib/dylibs -//! change, and this is a generic mechanism to make that work. -//! -//! ### Mtime comparison -//! -//! The use of modification timestamps is the most common way a unit will be -//! determined to be dirty or fresh between builds. There are many subtle -//! issues and edge cases with mtime comparisons. This gives a high-level -//! overview, but you'll need to read the code for the gritty details. Mtime -//! handling is different for different unit kinds. The different styles are -//! driven by the `Fingerprint.local` field, which is set based on the unit -//! kind. -//! -//! The status of whether or not the mtime is "stale" or "up-to-date" is -//! stored in `Fingerprint.fs_status`. -//! -//! All units will compare the mtime of its newest output file with the mtimes -//! of the outputs of all its dependencies. If any output file is missing, -//! then the unit is stale. If any dependency is newer, the unit is stale. -//! -//! #### Normal package mtime handling -//! -//! `LocalFingerprint::CheckDepinfo` is used for checking the mtime of -//! packages. It compares the mtime of the input files (the source files) to -//! the mtime of the dep-info file (which is written last after a build is -//! finished). If the dep-info is missing, the unit is stale (it has never -//! been built). The list of input files comes from the dep-info file. See the -//! section above for details on dep-info files. -//! -//! Also note that although registry and git packages use `CheckDepInfo`, none -//! of their source files are included in the dep-info (see -//! 
`translate_dep_info`), so for those kinds no mtime checking is done -//! (unless `-Zbinary-dep-depinfo` is used). Repository and git packages are -//! static, so there is no need to check anything. -//! -//! When a build is complete, the mtime of the dep-info file in the -//! fingerprint directory is modified to rewind it to the time when the build -//! started. This is done by creating an `invoked.timestamp` file when the -//! build starts to capture the start time. The mtime is rewound to the start -//! to handle the case where the user modifies a source file while a build is -//! running. Cargo can't know whether or not the file was included in the -//! build, so it takes a conservative approach of assuming the file was *not* -//! included, and it should be rebuilt during the next build. -//! -//! #### Rustdoc mtime handling -//! -//! Rustdoc does not emit a dep-info file, so Cargo currently has a relatively -//! simple system for detecting rebuilds. `LocalFingerprint::Precalculated` is -//! used for rustdoc units. For registry packages, this is the package -//! version. For git packages, it is the git hash. For path packages, it is -//! the a string of the mtime of the newest file in the package. -//! -//! There are some known bugs with how this works, so it should be improved at -//! some point. -//! -//! #### Build script mtime handling -//! -//! Build script mtime handling runs in different modes. There is the "old -//! style" where the build script does not emit any `rerun-if` directives. In -//! this mode, Cargo will use `LocalFingerprint::Precalculated`. See the -//! "rustdoc" section above how it works. -//! -//! In the new-style, each `rerun-if` directive is translated to the -//! corresponding `LocalFingerprint` variant. The `RerunIfChanged` variant -//! compares the mtime of the given filenames against the mtime of the -//! "output" file. -//! -//! Similar to normal units, the build script "output" file mtime is rewound -//! to the time just before the build script is executed to handle mid-build -//! modifications. -//! -//! ## Considerations for inclusion in a fingerprint -//! -//! Over time we've realized a few items which historically were included in -//! fingerprint hashings should not actually be included. Examples are: -//! -//! * Modification time values. We strive to never include a modification time -//! inside a `Fingerprint` to get hashed into an actual value. While -//! theoretically fine to do, in practice this causes issues with common -//! applications like Docker. Docker, after a layer is built, will zero out -//! the nanosecond part of all filesystem modification times. This means that -//! the actual modification time is different for all build artifacts, which -//! if we tracked the actual values of modification times would cause -//! unnecessary recompiles. To fix this we instead only track paths which are -//! relevant. These paths are checked dynamically to see if they're up to -//! date, and the modification time doesn't make its way into the fingerprint -//! hash. -//! -//! * Absolute path names. We strive to maintain a property where if you rename -//! a project directory Cargo will continue to preserve all build artifacts -//! and reuse the cache. This means that we can't ever hash an absolute path -//! name. Instead we always hash relative path names and the "root" is passed -//! in at runtime dynamically. Some of this is best effort, but the general -//! idea is that we assume all accesses within a crate stay within that -//! crate. -//! -//! 
These are pretty tricky to test for unfortunately, but we should have a good -//! test suite nowadays and lord knows Cargo gets enough testing in the wild! -//! -//! ## Build scripts -//! -//! The *running* of a build script (`CompileMode::RunCustomBuild`) is treated -//! significantly different than all other Unit kinds. It has its own function -//! for calculating the Fingerprint (`calculate_run_custom_build`) and has some -//! unique considerations. It does not track the same information as a normal -//! Unit. The information tracked depends on the `rerun-if-changed` and -//! `rerun-if-env-changed` statements produced by the build script. If the -//! script does not emit either of these statements, the Fingerprint runs in -//! "old style" mode where an mtime change of *any* file in the package will -//! cause the build script to be re-run. Otherwise, the fingerprint *only* -//! tracks the individual "rerun-if" items listed by the build script. -//! -//! The "rerun-if" statements from a *previous* build are stored in the build -//! output directory in a file called `output`. Cargo parses this file when -//! the Unit for that build script is prepared for the `JobQueue`. The -//! Fingerprint code can then use that information to compute the Fingerprint -//! and compare against the old fingerprint hash. -//! -//! Care must be taken with build script Fingerprints because the -//! `Fingerprint::local` value may be changed after the build script runs -//! (such as if the build script adds or removes "rerun-if" items). -//! -//! Another complication is if a build script is overridden. In that case, the -//! fingerprint is the hash of the output of the override. -//! -//! ## Special considerations -//! -//! Registry dependencies do not track the mtime of files. This is because -//! registry dependencies are not expected to change (if a new version is -//! used, the Package ID will change, causing a rebuild). Cargo currently -//! partially works with Docker caching. When a Docker image is built, it has -//! normal mtime information. However, when a step is cached, the nanosecond -//! portions of all files is zeroed out. Currently this works, but care must -//! be taken for situations like these. -//! -//! HFS on macOS only supports 1 second timestamps. This causes a significant -//! number of problems, particularly with Cargo's testsuite which does rapid -//! builds in succession. Other filesystems have various degrees of -//! resolution. -//! -//! Various weird filesystems (such as network filesystems) also can cause -//! complications. Network filesystems may track the time on the server -//! (except when the time is set manually such as with -//! `filetime::set_file_times`). Not all filesystems support modifying the -//! mtime. -//! -//! See the `A-rebuild-detection` flag on the issue tracker for more: -//! 
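// ---------------------------------------------------------------------------
// [Editor's note: the sketch below is an illustrative addition and is NOT part
// of the deleted cargo source file above or below.] It is a minimal,
// standard-library-only sketch of the mtime-anchor comparison described in the
// "Mtime comparison" section of the module docs: each input file is compared
// against the mtime of a reference ("anchor") file, such as the fingerprint
// dep-info file, and any input that is missing or newer than the anchor marks
// the unit as stale. The real `find_stale_file` further down additionally
// caches mtimes, recurses into directories, tracks env vars, and reports *why*
// something is stale; this sketch omits all of that, and the function name
// `first_stale_input` is purely hypothetical.
fn first_stale_input<'a>(
    anchor: &std::path::Path,
    inputs: impl IntoIterator<Item = &'a std::path::Path>,
) -> std::io::Result<Option<&'a std::path::Path>> {
    // The anchor's mtime marks when the last successful build started.
    let anchor_mtime = std::fs::metadata(anchor)?.modified()?;
    for input in inputs {
        match std::fs::metadata(input).and_then(|m| m.modified()) {
            // Missing or unreadable inputs are conservatively treated as stale.
            Err(_) => return Ok(Some(input)),
            // Strictly-newer inputs are stale; equal mtimes count as
            // up-to-date, matching the `<=` comparison (and the #5918/#6484
            // discussion) in `find_stale_file` below.
            Ok(mtime) if mtime > anchor_mtime => return Ok(Some(input)),
            Ok(_) => {}
        }
    }
    Ok(None)
}
// Hypothetical usage, with made-up paths:
//   let stale = first_stale_input(
//       std::path::Path::new("target/debug/.fingerprint/foo-0123/dep-lib-foo"),
//       [std::path::Path::new("src/lib.rs"), std::path::Path::new("build.rs")],
//   )?;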
- -use std::collections::hash_map::{Entry, HashMap}; -use std::convert::TryInto; -use std::env; -use std::hash::{self, Hash, Hasher}; -use std::io; -use std::path::{Path, PathBuf}; -use std::str; -use std::sync::{Arc, Mutex}; -use std::time::SystemTime; - -use anyhow::{bail, format_err, Context as _}; -use cargo_util::{paths, ProcessBuilder}; -use filetime::FileTime; -use log::{debug, info}; -use serde::de; -use serde::ser; -use serde::{Deserialize, Serialize}; - -use crate::core::compiler::unit_graph::UnitDep; -use crate::core::Package; -use crate::util; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::{internal, path_args, profile, StableHasher}; -use crate::CARGO_ENV; - -use super::custom_build::BuildDeps; -use super::job::{Job, Work}; -use super::{BuildContext, Context, FileFlavor, Unit}; - -/// Determines if a `unit` is up-to-date, and if not prepares necessary work to -/// update the persisted fingerprint. -/// -/// This function will inspect `unit`, calculate a fingerprint for it, and then -/// return an appropriate `Job` to run. The returned `Job` will be a noop if -/// `unit` is considered "fresh", or if it was previously built and cached. -/// Otherwise the `Job` returned will write out the true fingerprint to the -/// filesystem, to be executed after the unit's work has completed. -/// -/// The `force` flag is a way to force the `Job` to be "dirty", or always -/// update the fingerprint. **Beware using this flag** because it does not -/// transitively propagate throughout the dependency graph, it only forces this -/// one unit which is very unlikely to be what you want unless you're -/// exclusively talking about top-level units. -pub fn prepare_target(cx: &mut Context<'_, '_>, unit: &Unit, force: bool) -> CargoResult { - let _p = profile::start(format!( - "fingerprint: {} / {}", - unit.pkg.package_id(), - unit.target.name() - )); - let bcx = cx.bcx; - let loc = cx.files().fingerprint_file_path(unit, ""); - - debug!("fingerprint at: {}", loc.display()); - - // Figure out if this unit is up to date. After calculating the fingerprint - // compare it to an old version, if any, and attempt to print diagnostic - // information about failed comparisons to aid in debugging. - let fingerprint = calculate(cx, unit)?; - let mtime_on_use = cx.bcx.config.cli_unstable().mtime_on_use; - let compare = compare_old_fingerprint(&loc, &*fingerprint, mtime_on_use); - log_compare(unit, &compare); - - // If our comparison failed (e.g., we're going to trigger a rebuild of this - // crate), then we also ensure the source of the crate passes all - // verification checks before we build it. - // - // The `Source::verify` method is intended to allow sources to execute - // pre-build checks to ensure that the relevant source code is all - // up-to-date and as expected. This is currently used primarily for - // directory sources which will use this hook to perform an integrity check - // on all files in the source to ensure they haven't changed. If they have - // changed then an error is issued. - if compare.is_err() { - let source_id = unit.pkg.package_id().source_id(); - let sources = bcx.packages.sources(); - let source = sources - .get(source_id) - .ok_or_else(|| internal("missing package source"))?; - source.verify(unit.pkg.package_id())?; - } - - if compare.is_ok() && !force { - return Ok(Job::new_fresh()); - } - - // Clear out the old fingerprint file if it exists. This protects when - // compilation is interrupted leaving a corrupt file. 
For example, a - // project with a lib.rs and integration test (two units): - // - // 1. Build the library and integration test. - // 2. Make a change to lib.rs (NOT the integration test). - // 3. Build the integration test, hit Ctrl-C while linking. With gcc, this - // will leave behind an incomplete executable (zero size, or partially - // written). NOTE: The library builds successfully, it is the linking - // of the integration test that we are interrupting. - // 4. Build the integration test again. - // - // Without the following line, then step 3 will leave a valid fingerprint - // on the disk. Then step 4 will think the integration test is "fresh" - // because: - // - // - There is a valid fingerprint hash on disk (written in step 1). - // - The mtime of the output file (the corrupt integration executable - // written in step 3) is newer than all of its dependencies. - // - The mtime of the integration test fingerprint dep-info file (written - // in step 1) is newer than the integration test's source files, because - // we haven't modified any of its source files. - // - // But the executable is corrupt and needs to be rebuilt. Clearing the - // fingerprint at step 3 ensures that Cargo never mistakes a partially - // written output as up-to-date. - if loc.exists() { - // Truncate instead of delete so that compare_old_fingerprint will - // still log the reason for the fingerprint failure instead of just - // reporting "failed to read fingerprint" during the next build if - // this build fails. - paths::write(&loc, b"")?; - } - - let write_fingerprint = if unit.mode.is_run_custom_build() { - // For build scripts the `local` field of the fingerprint may change - // while we're executing it. For example it could be in the legacy - // "consider everything a dependency mode" and then we switch to "deps - // are explicitly specified" mode. - // - // To handle this movement we need to regenerate the `local` field of a - // build script's fingerprint after it's executed. We do this by - // using the `build_script_local_fingerprints` function which returns a - // thunk we can invoke on a foreign thread to calculate this. - let build_script_outputs = Arc::clone(&cx.build_script_outputs); - let metadata = cx.get_run_build_script_metadata(unit); - let (gen_local, _overridden) = build_script_local_fingerprints(cx, unit); - let output_path = cx.build_explicit_deps[unit].build_script_output.clone(); - Work::new(move |_| { - let outputs = build_script_outputs.lock().unwrap(); - let output = outputs - .get(metadata) - .expect("output must exist after running"); - let deps = BuildDeps::new(&output_path, Some(output)); - - // FIXME: it's basically buggy that we pass `None` to `call_box` - // here. See documentation on `build_script_local_fingerprints` - // below for more information. Despite this just try to proceed and - // hobble along if it happens to return `Some`. - if let Some(new_local) = (gen_local)(&deps, None)? { - *fingerprint.local.lock().unwrap() = new_local; - } - - write_fingerprint(&loc, &fingerprint) - }) - } else { - Work::new(move |_| write_fingerprint(&loc, &fingerprint)) - }; - - Ok(Job::new_dirty(write_fingerprint)) -} - -/// Dependency edge information for fingerprints. This is generated for each -/// dependency and is stored in a `Fingerprint` below. -#[derive(Clone)] -struct DepFingerprint { - /// The hash of the package id that this dependency points to - pkg_id: u64, - /// The crate name we're using for this dependency, which if we change we'll - /// need to recompile! 
- name: InternedString, - /// Whether or not this dependency is flagged as a public dependency or not. - public: bool, - /// Whether or not this dependency is an rmeta dependency or a "full" - /// dependency. In the case of an rmeta dependency our dependency edge only - /// actually requires the rmeta from what we depend on, so when checking - /// mtime information all files other than the rmeta can be ignored. - only_requires_rmeta: bool, - /// The dependency's fingerprint we recursively point to, containing all the - /// other hash information we'd otherwise need. - fingerprint: Arc, -} - -/// A fingerprint can be considered to be a "short string" representing the -/// state of a world for a package. -/// -/// If a fingerprint ever changes, then the package itself needs to be -/// recompiled. Inputs to the fingerprint include source code modifications, -/// compiler flags, compiler version, etc. This structure is not simply a -/// `String` due to the fact that some fingerprints cannot be calculated lazily. -/// -/// Path sources, for example, use the mtime of the corresponding dep-info file -/// as a fingerprint (all source files must be modified *before* this mtime). -/// This dep-info file is not generated, however, until after the crate is -/// compiled. As a result, this structure can be thought of as a fingerprint -/// to-be. The actual value can be calculated via `hash_u64()`, but the operation -/// may fail as some files may not have been generated. -/// -/// Note that dependencies are taken into account for fingerprints because rustc -/// requires that whenever an upstream crate is recompiled that all downstream -/// dependents are also recompiled. This is typically tracked through -/// `DependencyQueue`, but it also needs to be retained here because Cargo can -/// be interrupted while executing, losing the state of the `DependencyQueue` -/// graph. -#[derive(Serialize, Deserialize)] -pub struct Fingerprint { - /// Hash of the version of `rustc` used. - rustc: u64, - /// Sorted list of cfg features enabled. - features: String, - /// Hash of the `Target` struct, including the target name, - /// package-relative source path, edition, etc. - target: u64, - /// Hash of the `Profile`, `CompileMode`, and any extra flags passed via - /// `cargo rustc` or `cargo rustdoc`. - profile: u64, - /// Hash of the path to the base source file. This is relative to the - /// workspace root for path members, or absolute for other sources. - path: u64, - /// Fingerprints of dependencies. - deps: Vec, - /// Information about the inputs that affect this Unit (such as source - /// file mtimes or build script environment variables). - local: Mutex>, - /// Cached hash of the `Fingerprint` struct. Used to improve performance - /// for hashing. - #[serde(skip)] - memoized_hash: Mutex>, - /// RUSTFLAGS/RUSTDOCFLAGS environment variable value (or config value). - rustflags: Vec, - /// Hash of some metadata from the manifest, such as "authors", or - /// "description", which are exposed as environment variables during - /// compilation. - metadata: u64, - /// Hash of various config settings that change how things are compiled. - config: u64, - /// The rustc target. This is only relevant for `.json` files, otherwise - /// the metadata hash segregates the units. - compile_kind: u64, - /// Description of whether the filesystem status for this unit is up to date - /// or should be considered stale. 
- #[serde(skip)] - fs_status: FsStatus, - /// Files, relative to `target_root`, that are produced by the step that - /// this `Fingerprint` represents. This is used to detect when the whole - /// fingerprint is out of date if this is missing, or if previous - /// fingerprints output files are regenerated and look newer than this one. - #[serde(skip)] - outputs: Vec, -} - -/// Indication of the status on the filesystem for a particular unit. -enum FsStatus { - /// This unit is to be considered stale, even if hash information all - /// matches. The filesystem inputs have changed (or are missing) and the - /// unit needs to subsequently be recompiled. - Stale, - - /// This unit is up-to-date. All outputs and their corresponding mtime are - /// listed in the payload here for other dependencies to compare against. - UpToDate { mtimes: HashMap }, -} - -impl FsStatus { - fn up_to_date(&self) -> bool { - match self { - FsStatus::UpToDate { .. } => true, - FsStatus::Stale => false, - } - } -} - -impl Default for FsStatus { - fn default() -> FsStatus { - FsStatus::Stale - } -} - -impl Serialize for DepFingerprint { - fn serialize(&self, ser: S) -> Result - where - S: ser::Serializer, - { - ( - &self.pkg_id, - &self.name, - &self.public, - &self.fingerprint.hash_u64(), - ) - .serialize(ser) - } -} - -impl<'de> Deserialize<'de> for DepFingerprint { - fn deserialize(d: D) -> Result - where - D: de::Deserializer<'de>, - { - let (pkg_id, name, public, hash) = <(u64, String, bool, u64)>::deserialize(d)?; - Ok(DepFingerprint { - pkg_id, - name: InternedString::new(&name), - public, - fingerprint: Arc::new(Fingerprint { - memoized_hash: Mutex::new(Some(hash)), - ..Fingerprint::new() - }), - // This field is never read since it's only used in - // `check_filesystem` which isn't used by fingerprints loaded from - // disk. - only_requires_rmeta: false, - }) - } -} - -/// A `LocalFingerprint` represents something that we use to detect direct -/// changes to a `Fingerprint`. -/// -/// This is where we track file information, env vars, etc. This -/// `LocalFingerprint` struct is hashed and if the hash changes will force a -/// recompile of any fingerprint it's included into. Note that the "local" -/// terminology comes from the fact that it only has to do with one crate, and -/// `Fingerprint` tracks the transitive propagation of fingerprint changes. -/// -/// Note that because this is hashed its contents are carefully managed. Like -/// mentioned in the above module docs, we don't want to hash absolute paths or -/// mtime information. -/// -/// Also note that a `LocalFingerprint` is used in `check_filesystem` to detect -/// when the filesystem contains stale information (based on mtime currently). -/// The paths here don't change much between compilations but they're used as -/// inputs when we probe the filesystem looking at information. -#[derive(Debug, Serialize, Deserialize, Hash)] -enum LocalFingerprint { - /// This is a precalculated fingerprint which has an opaque string we just - /// hash as usual. This variant is primarily used for rustdoc where we - /// don't have a dep-info file to compare against. - /// - /// This is also used for build scripts with no `rerun-if-*` statements, but - /// that's overall a mistake and causes bugs in Cargo. We shouldn't use this - /// for build scripts. - Precalculated(String), - - /// This is used for crate compilations. 
The `dep_info` file is a relative - /// path anchored at `target_root(...)` to the dep-info file that Cargo - /// generates (which is a custom serialization after parsing rustc's own - /// `dep-info` output). - /// - /// The `dep_info` file, when present, also lists a number of other files - /// for us to look at. If any of those files are newer than this file then - /// we need to recompile. - CheckDepInfo { dep_info: PathBuf }, - - /// This represents a nonempty set of `rerun-if-changed` annotations printed - /// out by a build script. The `output` file is a relative file anchored at - /// `target_root(...)` which is the actual output of the build script. That - /// output has already been parsed and the paths printed out via - /// `rerun-if-changed` are listed in `paths`. The `paths` field is relative - /// to `pkg.root()` - /// - /// This is considered up-to-date if all of the `paths` are older than - /// `output`, otherwise we need to recompile. - RerunIfChanged { - output: PathBuf, - paths: Vec, - }, - - /// This represents a single `rerun-if-env-changed` annotation printed by a - /// build script. The exact env var and value are hashed here. There's no - /// filesystem dependence here, and if the values are changed the hash will - /// change forcing a recompile. - RerunIfEnvChanged { var: String, val: Option }, -} - -enum StaleItem { - MissingFile(PathBuf), - ChangedFile { - reference: PathBuf, - reference_mtime: FileTime, - stale: PathBuf, - stale_mtime: FileTime, - }, - ChangedEnv { - var: String, - previous: Option, - current: Option, - }, -} - -impl LocalFingerprint { - /// Checks dynamically at runtime if this `LocalFingerprint` has a stale - /// item inside of it. - /// - /// The main purpose of this function is to handle two different ways - /// fingerprints can be invalidated: - /// - /// * One is a dependency listed in rustc's dep-info files is invalid. Note - /// that these could either be env vars or files. We check both here. - /// - /// * Another is the `rerun-if-changed` directive from build scripts. This - /// is where we'll find whether files have actually changed - fn find_stale_item( - &self, - mtime_cache: &mut HashMap, - pkg_root: &Path, - target_root: &Path, - cargo_exe: &Path, - ) -> CargoResult> { - match self { - // We need to parse `dep_info`, learn about the crate's dependencies. - // - // For each env var we see if our current process's env var still - // matches, and for each file we see if any of them are newer than - // the `dep_info` file itself whose mtime represents the start of - // rustc. - LocalFingerprint::CheckDepInfo { dep_info } => { - let dep_info = target_root.join(dep_info); - let info = match parse_dep_info(pkg_root, target_root, &dep_info)? { - Some(info) => info, - None => return Ok(Some(StaleItem::MissingFile(dep_info))), - }; - for (key, previous) in info.env.iter() { - let current = if key == CARGO_ENV { - Some( - cargo_exe - .to_str() - .ok_or_else(|| { - format_err!( - "cargo exe path {} must be valid UTF-8", - cargo_exe.display() - ) - })? - .to_string(), - ) - } else { - env::var(key).ok() - }; - if current == *previous { - continue; - } - return Ok(Some(StaleItem::ChangedEnv { - var: key.clone(), - previous: previous.clone(), - current, - })); - } - Ok(find_stale_file(mtime_cache, &dep_info, info.files.iter())) - } - - // We need to verify that no paths listed in `paths` are newer than - // the `output` path itself, or the last time the build script ran. 
- LocalFingerprint::RerunIfChanged { output, paths } => Ok(find_stale_file( - mtime_cache, - &target_root.join(output), - paths.iter().map(|p| pkg_root.join(p)), - )), - - // These have no dependencies on the filesystem, and their values - // are included natively in the `Fingerprint` hash so nothing - // tocheck for here. - LocalFingerprint::RerunIfEnvChanged { .. } => Ok(None), - LocalFingerprint::Precalculated(..) => Ok(None), - } - } - - fn kind(&self) -> &'static str { - match self { - LocalFingerprint::Precalculated(..) => "precalculated", - LocalFingerprint::CheckDepInfo { .. } => "dep-info", - LocalFingerprint::RerunIfChanged { .. } => "rerun-if-changed", - LocalFingerprint::RerunIfEnvChanged { .. } => "rerun-if-env-changed", - } - } -} - -impl Fingerprint { - fn new() -> Fingerprint { - Fingerprint { - rustc: 0, - target: 0, - profile: 0, - path: 0, - features: String::new(), - deps: Vec::new(), - local: Mutex::new(Vec::new()), - memoized_hash: Mutex::new(None), - rustflags: Vec::new(), - metadata: 0, - config: 0, - compile_kind: 0, - fs_status: FsStatus::Stale, - outputs: Vec::new(), - } - } - - /// For performance reasons fingerprints will memoize their own hash, but - /// there's also internal mutability with its `local` field which can - /// change, for example with build scripts, during a build. - /// - /// This method can be used to bust all memoized hashes just before a build - /// to ensure that after a build completes everything is up-to-date. - pub fn clear_memoized(&self) { - *self.memoized_hash.lock().unwrap() = None; - } - - fn hash_u64(&self) -> u64 { - if let Some(s) = *self.memoized_hash.lock().unwrap() { - return s; - } - let ret = util::hash_u64(self); - *self.memoized_hash.lock().unwrap() = Some(ret); - ret - } - - /// Compares this fingerprint with an old version which was previously - /// serialized to filesystem. - /// - /// The purpose of this is exclusively to produce a diagnostic message - /// indicating why we're recompiling something. This function always returns - /// an error, it will never return success. 
- fn compare(&self, old: &Fingerprint) -> CargoResult<()> { - if self.rustc != old.rustc { - bail!("rust compiler has changed") - } - if self.features != old.features { - bail!( - "features have changed: previously {}, now {}", - old.features, - self.features - ) - } - if self.target != old.target { - bail!("target configuration has changed") - } - if self.path != old.path { - bail!("path to the source has changed") - } - if self.profile != old.profile { - bail!("profile configuration has changed") - } - if self.rustflags != old.rustflags { - bail!( - "RUSTFLAGS has changed: previously {:?}, now {:?}", - old.rustflags, - self.rustflags - ) - } - if self.metadata != old.metadata { - bail!("metadata changed") - } - if self.config != old.config { - bail!("configuration settings have changed") - } - if self.compile_kind != old.compile_kind { - bail!("compile kind (rustc target) changed") - } - let my_local = self.local.lock().unwrap(); - let old_local = old.local.lock().unwrap(); - if my_local.len() != old_local.len() { - bail!("local lens changed"); - } - for (new, old) in my_local.iter().zip(old_local.iter()) { - match (new, old) { - (LocalFingerprint::Precalculated(a), LocalFingerprint::Precalculated(b)) => { - if a != b { - bail!( - "precalculated components have changed: previously {}, now {}", - b, - a - ) - } - } - ( - LocalFingerprint::CheckDepInfo { dep_info: adep }, - LocalFingerprint::CheckDepInfo { dep_info: bdep }, - ) => { - if adep != bdep { - bail!( - "dep info output changed: previously {:?}, now {:?}", - bdep, - adep - ) - } - } - ( - LocalFingerprint::RerunIfChanged { - output: aout, - paths: apaths, - }, - LocalFingerprint::RerunIfChanged { - output: bout, - paths: bpaths, - }, - ) => { - if aout != bout { - bail!( - "rerun-if-changed output changed: previously {:?}, now {:?}", - bout, - aout - ) - } - if apaths != bpaths { - bail!( - "rerun-if-changed output changed: previously {:?}, now {:?}", - bpaths, - apaths, - ) - } - } - ( - LocalFingerprint::RerunIfEnvChanged { - var: akey, - val: avalue, - }, - LocalFingerprint::RerunIfEnvChanged { - var: bkey, - val: bvalue, - }, - ) => { - if *akey != *bkey { - bail!("env vars changed: previously {}, now {}", bkey, akey); - } - if *avalue != *bvalue { - bail!( - "env var `{}` changed: previously {:?}, now {:?}", - akey, - bvalue, - avalue - ) - } - } - (a, b) => bail!( - "local fingerprint type has changed ({} => {})", - b.kind(), - a.kind() - ), - } - } - - if self.deps.len() != old.deps.len() { - bail!("number of dependencies has changed") - } - for (a, b) in self.deps.iter().zip(old.deps.iter()) { - if a.name != b.name { - let e = format_err!("`{}` != `{}`", a.name, b.name) - .context("unit dependency name changed"); - return Err(e); - } - - if a.fingerprint.hash_u64() != b.fingerprint.hash_u64() { - let e = format_err!( - "new ({}/{:x}) != old ({}/{:x})", - a.name, - a.fingerprint.hash_u64(), - b.name, - b.fingerprint.hash_u64() - ) - .context("unit dependency information changed"); - return Err(e); - } - } - - if !self.fs_status.up_to_date() { - bail!("current filesystem status shows we're outdated"); - } - - // This typically means some filesystem modifications happened or - // something transitive was odd. In general we should strive to provide - // a better error message than this, so if you see this message a lot it - // likely means this method needs to be updated! 
- bail!("two fingerprint comparison turned up nothing obvious"); - } - - /// Dynamically inspect the local filesystem to update the `fs_status` field - /// of this `Fingerprint`. - /// - /// This function is used just after a `Fingerprint` is constructed to check - /// the local state of the filesystem and propagate any dirtiness from - /// dependencies up to this unit as well. This function assumes that the - /// unit starts out as `FsStatus::Stale` and then it will optionally switch - /// it to `UpToDate` if it can. - fn check_filesystem( - &mut self, - mtime_cache: &mut HashMap, - pkg_root: &Path, - target_root: &Path, - cargo_exe: &Path, - ) -> CargoResult<()> { - assert!(!self.fs_status.up_to_date()); - - let mut mtimes = HashMap::new(); - - // Get the `mtime` of all outputs. Optionally update their mtime - // afterwards based on the `mtime_on_use` flag. Afterwards we want the - // minimum mtime as it's the one we'll be comparing to inputs and - // dependencies. - for output in self.outputs.iter() { - let mtime = match paths::mtime(output) { - Ok(mtime) => mtime, - - // This path failed to report its `mtime`. It probably doesn't - // exists, so leave ourselves as stale and bail out. - Err(e) => { - debug!("failed to get mtime of {:?}: {}", output, e); - return Ok(()); - } - }; - assert!(mtimes.insert(output.clone(), mtime).is_none()); - } - - let opt_max = mtimes.iter().max_by_key(|kv| kv.1); - let (max_path, max_mtime) = match opt_max { - Some(mtime) => mtime, - - // We had no output files. This means we're an overridden build - // script and we're just always up to date because we aren't - // watching the filesystem. - None => { - self.fs_status = FsStatus::UpToDate { mtimes }; - return Ok(()); - } - }; - debug!( - "max output mtime for {:?} is {:?} {}", - pkg_root, max_path, max_mtime - ); - - for dep in self.deps.iter() { - let dep_mtimes = match &dep.fingerprint.fs_status { - FsStatus::UpToDate { mtimes } => mtimes, - // If our dependency is stale, so are we, so bail out. - FsStatus::Stale => return Ok(()), - }; - - // If our dependency edge only requires the rmeta file to be present - // then we only need to look at that one output file, otherwise we - // need to consider all output files to see if we're out of date. - let (dep_path, dep_mtime) = if dep.only_requires_rmeta { - dep_mtimes - .iter() - .find(|(path, _mtime)| { - path.extension().and_then(|s| s.to_str()) == Some("rmeta") - }) - .expect("failed to find rmeta") - } else { - match dep_mtimes.iter().max_by_key(|kv| kv.1) { - Some(dep_mtime) => dep_mtime, - // If our dependencies is up to date and has no filesystem - // interactions, then we can move on to the next dependency. - None => continue, - } - }; - debug!( - "max dep mtime for {:?} is {:?} {}", - pkg_root, dep_path, dep_mtime - ); - - // If the dependency is newer than our own output then it was - // recompiled previously. We transitively become stale ourselves in - // that case, so bail out. - // - // Note that this comparison should probably be `>=`, not `>`, but - // for a discussion of why it's `>` see the discussion about #5918 - // below in `find_stale`. - if dep_mtime > max_mtime { - info!( - "dependency on `{}` is newer than we are {} > {} {:?}", - dep.name, dep_mtime, max_mtime, pkg_root - ); - return Ok(()); - } - } - - // If we reached this far then all dependencies are up to date. Check - // all our `LocalFingerprint` information to see if we have any stale - // files for this package itself. 
If we do find something log a helpful - // message and bail out so we stay stale. - for local in self.local.get_mut().unwrap().iter() { - if let Some(item) = - local.find_stale_item(mtime_cache, pkg_root, target_root, cargo_exe)? - { - item.log(); - return Ok(()); - } - } - - // Everything was up to date! Record such. - self.fs_status = FsStatus::UpToDate { mtimes }; - debug!("filesystem up-to-date {:?}", pkg_root); - - Ok(()) - } -} - -impl hash::Hash for Fingerprint { - fn hash(&self, h: &mut H) { - let Fingerprint { - rustc, - ref features, - target, - path, - profile, - ref deps, - ref local, - metadata, - config, - compile_kind, - ref rustflags, - .. - } = *self; - let local = local.lock().unwrap(); - ( - rustc, - features, - target, - path, - profile, - &*local, - metadata, - config, - compile_kind, - rustflags, - ) - .hash(h); - - h.write_usize(deps.len()); - for DepFingerprint { - pkg_id, - name, - public, - fingerprint, - only_requires_rmeta: _, // static property, no need to hash - } in deps - { - pkg_id.hash(h); - name.hash(h); - public.hash(h); - // use memoized dep hashes to avoid exponential blowup - h.write_u64(fingerprint.hash_u64()); - } - } -} - -impl DepFingerprint { - fn new(cx: &mut Context<'_, '_>, parent: &Unit, dep: &UnitDep) -> CargoResult { - let fingerprint = calculate(cx, &dep.unit)?; - // We need to be careful about what we hash here. We have a goal of - // supporting renaming a project directory and not rebuilding - // everything. To do that, however, we need to make sure that the cwd - // doesn't make its way into any hashes, and one source of that is the - // `SourceId` for `path` packages. - // - // We already have a requirement that `path` packages all have unique - // names (sort of for this same reason), so if the package source is a - // `path` then we just hash the name, but otherwise we hash the full - // id as it won't change when the directory is renamed. - let pkg_id = if dep.unit.pkg.package_id().source_id().is_path() { - util::hash_u64(dep.unit.pkg.package_id().name()) - } else { - util::hash_u64(dep.unit.pkg.package_id()) - }; - - Ok(DepFingerprint { - pkg_id, - name: dep.extern_crate_name, - public: dep.public, - fingerprint, - only_requires_rmeta: cx.only_requires_rmeta(parent, &dep.unit), - }) - } -} - -impl StaleItem { - /// Use the `log` crate to log a hopefully helpful message in diagnosing - /// what file is considered stale and why. This is intended to be used in - /// conjunction with `CARGO_LOG` to determine why Cargo is recompiling - /// something. Currently there's no user-facing usage of this other than - /// that. - fn log(&self) { - match self { - StaleItem::MissingFile(path) => { - info!("stale: missing {:?}", path); - } - StaleItem::ChangedFile { - reference, - reference_mtime, - stale, - stale_mtime, - } => { - info!("stale: changed {:?}", stale); - info!(" (vs) {:?}", reference); - info!(" {:?} != {:?}", reference_mtime, stale_mtime); - } - StaleItem::ChangedEnv { - var, - previous, - current, - } => { - info!("stale: changed env {:?}", var); - info!(" {:?} != {:?}", previous, current); - } - } - } -} - -/// Calculates the fingerprint for a `unit`. -/// -/// This fingerprint is used by Cargo to learn about when information such as: -/// -/// * A non-path package changes (changes version, changes revision, etc). 
-/// * Any dependency changes -/// * The compiler changes -/// * The set of features a package is built with changes -/// * The profile a target is compiled with changes (e.g., opt-level changes) -/// * Any other compiler flags change that will affect the result -/// -/// Information like file modification time is only calculated for path -/// dependencies. -fn calculate(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult> { - // This function is slammed quite a lot, so the result is memoized. - if let Some(s) = cx.fingerprints.get(unit) { - return Ok(Arc::clone(s)); - } - let mut fingerprint = if unit.mode.is_run_custom_build() { - calculate_run_custom_build(cx, unit)? - } else if unit.mode.is_doc_test() { - panic!("doc tests do not fingerprint"); - } else { - calculate_normal(cx, unit)? - }; - - // After we built the initial `Fingerprint` be sure to update the - // `fs_status` field of it. - let target_root = target_root(cx); - let cargo_exe = cx.bcx.config.cargo_exe()?; - fingerprint.check_filesystem( - &mut cx.mtime_cache, - unit.pkg.root(), - &target_root, - cargo_exe, - )?; - - let fingerprint = Arc::new(fingerprint); - cx.fingerprints - .insert(unit.clone(), Arc::clone(&fingerprint)); - Ok(fingerprint) -} - -/// Calculate a fingerprint for a "normal" unit, or anything that's not a build -/// script. This is an internal helper of `calculate`, don't call directly. -fn calculate_normal(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { - // Recursively calculate the fingerprint for all of our dependencies. - // - // Skip fingerprints of binaries because they don't actually induce a - // recompile, they're just dependencies in the sense that they need to be - // built. - // - // Create Vec since mutable cx is needed in closure. - let deps = Vec::from(cx.unit_deps(unit)); - let mut deps = deps - .into_iter() - .filter(|dep| !dep.unit.target.is_bin()) - .map(|dep| DepFingerprint::new(cx, unit, &dep)) - .collect::>>()?; - deps.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id)); - - // Afterwards calculate our own fingerprint information. - let target_root = target_root(cx); - let local = if unit.mode.is_doc() { - // rustdoc does not have dep-info files. - let fingerprint = pkg_fingerprint(cx.bcx, &unit.pkg).with_context(|| { - format!( - "failed to determine package fingerprint for documenting {}", - unit.pkg - ) - })?; - vec![LocalFingerprint::Precalculated(fingerprint)] - } else { - let dep_info = dep_info_loc(cx, unit); - let dep_info = dep_info.strip_prefix(&target_root).unwrap().to_path_buf(); - vec![LocalFingerprint::CheckDepInfo { dep_info }] - }; - - // Figure out what the outputs of our unit is, and we'll be storing them - // into the fingerprint as well. - let outputs = cx - .outputs(unit)? - .iter() - .filter(|output| !matches!(output.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary)) - .map(|output| output.path.clone()) - .collect(); - - // Fill out a bunch more information that we'll be tracking typically - // hashed to take up less space on disk as we just need to know when things - // change. - let extra_flags = if unit.mode.is_doc() { - cx.bcx.rustdocflags_args(unit) - } else { - cx.bcx.rustflags_args(unit) - } - .to_vec(); - - let profile_hash = util::hash_u64(( - &unit.profile, - unit.mode, - cx.bcx.extra_args_for(unit), - cx.lto[unit], - )); - // Include metadata since it is exposed as environment variables. 
- let m = unit.pkg.manifest().metadata(); - let metadata = util::hash_u64((&m.authors, &m.description, &m.homepage, &m.repository)); - let mut config = StableHasher::new(); - if let Some(linker) = cx.bcx.linker(unit.kind) { - linker.hash(&mut config); - } - if unit.mode.is_doc() && cx.bcx.config.cli_unstable().rustdoc_map { - if let Ok(map) = cx.bcx.config.doc_extern_map() { - map.hash(&mut config); - } - } - if let Some(allow_features) = &cx.bcx.config.cli_unstable().allow_features { - allow_features.hash(&mut config); - } - let compile_kind = unit.kind.fingerprint_hash(); - Ok(Fingerprint { - rustc: util::hash_u64(&cx.bcx.rustc().verbose_version), - target: util::hash_u64(&unit.target), - profile: profile_hash, - // Note that .0 is hashed here, not .1 which is the cwd. That doesn't - // actually affect the output artifact so there's no need to hash it. - path: util::hash_u64(path_args(cx.bcx.ws, unit).0), - features: format!("{:?}", unit.features), - deps, - local: Mutex::new(local), - memoized_hash: Mutex::new(None), - metadata, - config: config.finish(), - compile_kind, - rustflags: extra_flags, - fs_status: FsStatus::Stale, - outputs, - }) -} - -/// Calculate a fingerprint for an "execute a build script" unit. This is an -/// internal helper of `calculate`, don't call directly. -fn calculate_run_custom_build(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { - assert!(unit.mode.is_run_custom_build()); - // Using the `BuildDeps` information we'll have previously parsed and - // inserted into `build_explicit_deps` built an initial snapshot of the - // `LocalFingerprint` list for this build script. If we previously executed - // the build script this means we'll be watching files and env vars. - // Otherwise if we haven't previously executed it we'll just start watching - // the whole crate. - let (gen_local, overridden) = build_script_local_fingerprints(cx, unit); - let deps = &cx.build_explicit_deps[unit]; - let local = (gen_local)( - deps, - Some(&|| { - const IO_ERR_MESSAGE: &str = "\ -An I/O error happened. Please make sure you can access the file. - -By default, if your project contains a build script, cargo scans all files in -it to determine whether a rebuild is needed. If you don't expect to access the -file, specify `rerun-if-changed` in your build script. -See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-changed for more information."; - pkg_fingerprint(cx.bcx, &unit.pkg).map_err(|err| { - let mut message = format!("failed to determine package fingerprint for build script for {}", unit.pkg); - if err.root_cause().is::() { - message = format!("{}\n{}", message, IO_ERR_MESSAGE) - } - err.context(message) - }) - }), - )? - .unwrap(); - let output = deps.build_script_output.clone(); - - // Include any dependencies of our execution, which is typically just the - // compilation of the build script itself. (if the build script changes we - // should be rerun!). Note though that if we're an overridden build script - // we have no dependencies so no need to recurse in that case. - let deps = if overridden { - // Overridden build scripts don't need to track deps. - vec![] - } else { - // Create Vec since mutable cx is needed in closure. - let deps = Vec::from(cx.unit_deps(unit)); - deps.into_iter() - .map(|dep| DepFingerprint::new(cx, unit, &dep)) - .collect::>>()? 
- }; - - Ok(Fingerprint { - local: Mutex::new(local), - rustc: util::hash_u64(&cx.bcx.rustc().verbose_version), - deps, - outputs: if overridden { Vec::new() } else { vec![output] }, - - // Most of the other info is blank here as we don't really include it - // in the execution of the build script, but... this may be a latent - // bug in Cargo. - ..Fingerprint::new() - }) -} - -/// Get ready to compute the `LocalFingerprint` values for a `RunCustomBuild` -/// unit. -/// -/// This function has, what's on the surface, a seriously wonky interface. -/// You'll call this function and it'll return a closure and a boolean. The -/// boolean is pretty simple in that it indicates whether the `unit` has been -/// overridden via `.cargo/config`. The closure is much more complicated. -/// -/// This closure is intended to capture any local state necessary to compute -/// the `LocalFingerprint` values for this unit. It is `Send` and `'static` to -/// be sent to other threads as well (such as when we're executing build -/// scripts). That deduplication is the rationale for the closure at least. -/// -/// The arguments to the closure are a bit weirder, though, and I'll apologize -/// in advance for the weirdness too. The first argument to the closure is a -/// `&BuildDeps`. This is the parsed version of a build script, and when Cargo -/// starts up this is cached from previous runs of a build script. After a -/// build script executes the output file is reparsed and passed in here. -/// -/// The second argument is the weirdest, it's *optionally* a closure to -/// call `pkg_fingerprint` below. The `pkg_fingerprint` below requires access -/// to "source map" located in `Context`. That's very non-`'static` and -/// non-`Send`, so it can't be used on other threads, such as when we invoke -/// this after a build script has finished. The `Option` allows us to for sure -/// calculate it on the main thread at the beginning, and then swallow the bug -/// for now where a worker thread after a build script has finished doesn't -/// have access. Ideally there would be no second argument or it would be more -/// "first class" and not an `Option` but something that can be sent between -/// threads. In any case, it's a bug for now. -/// -/// This isn't the greatest of interfaces, and if there's suggestions to -/// improve please do so! -/// -/// FIXME(#6779) - see all the words above -fn build_script_local_fingerprints( - cx: &mut Context<'_, '_>, - unit: &Unit, -) -> ( - Box< - dyn FnOnce( - &BuildDeps, - Option<&dyn Fn() -> CargoResult>, - ) -> CargoResult>> - + Send, - >, - bool, -) { - assert!(unit.mode.is_run_custom_build()); - // First up, if this build script is entirely overridden, then we just - // return the hash of what we overrode it with. This is the easy case! - if let Some(fingerprint) = build_script_override_fingerprint(cx, unit) { - debug!("override local fingerprints deps {}", unit.pkg); - return ( - Box::new( - move |_: &BuildDeps, _: Option<&dyn Fn() -> CargoResult>| { - Ok(Some(vec![fingerprint])) - }, - ), - true, // this is an overridden build script - ); - } - - // ... Otherwise this is a "real" build script and we need to return a real - // closure. Our returned closure classifies the build script based on - // whether it prints `rerun-if-*`. If it *doesn't* print this it's where the - // magical second argument comes into play, which fingerprints a whole - // package. Remember that the fact that this is an `Option` is a bug, but a - // longstanding bug, in Cargo. 
Recent refactorings just made it painfully - // obvious. - let pkg_root = unit.pkg.root().to_path_buf(); - let target_dir = target_root(cx); - let calculate = - move |deps: &BuildDeps, pkg_fingerprint: Option<&dyn Fn() -> CargoResult>| { - if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() { - match pkg_fingerprint { - // FIXME: this is somewhat buggy with respect to docker and - // weird filesystems. The `Precalculated` variant - // constructed below will, for `path` dependencies, contain - // a stringified version of the mtime for the local crate. - // This violates one of the things we describe in this - // module's doc comment, never hashing mtimes. We should - // figure out a better scheme where a package fingerprint - // may be a string (like for a registry) or a list of files - // (like for a path dependency). Those list of files would - // be stored here rather than the the mtime of them. - Some(f) => { - let s = f()?; - debug!( - "old local fingerprints deps {:?} precalculated={:?}", - pkg_root, s - ); - return Ok(Some(vec![LocalFingerprint::Precalculated(s)])); - } - None => return Ok(None), - } - } - - // Ok so now we're in "new mode" where we can have files listed as - // dependencies as well as env vars listed as dependencies. Process - // them all here. - Ok(Some(local_fingerprints_deps(deps, &target_dir, &pkg_root))) - }; - - // Note that `false` == "not overridden" - (Box::new(calculate), false) -} - -/// Create a `LocalFingerprint` for an overridden build script. -/// Returns None if it is not overridden. -fn build_script_override_fingerprint( - cx: &mut Context<'_, '_>, - unit: &Unit, -) -> Option { - // Build script output is only populated at this stage when it is - // overridden. - let build_script_outputs = cx.build_script_outputs.lock().unwrap(); - let metadata = cx.get_run_build_script_metadata(unit); - // Returns None if it is not overridden. - let output = build_script_outputs.get(metadata)?; - let s = format!( - "overridden build state with hash: {}", - util::hash_u64(output) - ); - Some(LocalFingerprint::Precalculated(s)) -} - -/// Compute the `LocalFingerprint` values for a `RunCustomBuild` unit for -/// non-overridden new-style build scripts only. This is only used when `deps` -/// is already known to have a nonempty `rerun-if-*` somewhere. -fn local_fingerprints_deps( - deps: &BuildDeps, - target_root: &Path, - pkg_root: &Path, -) -> Vec { - debug!("new local fingerprints deps {:?}", pkg_root); - let mut local = Vec::new(); - - if !deps.rerun_if_changed.is_empty() { - // Note that like the module comment above says we are careful to never - // store an absolute path in `LocalFingerprint`, so ensure that we strip - // absolute prefixes from them. - let output = deps - .build_script_output - .strip_prefix(target_root) - .unwrap() - .to_path_buf(); - let paths = deps - .rerun_if_changed - .iter() - .map(|p| p.strip_prefix(pkg_root).unwrap_or(p).to_path_buf()) - .collect(); - local.push(LocalFingerprint::RerunIfChanged { output, paths }); - } - - for var in deps.rerun_if_env_changed.iter() { - let val = env::var(var).ok(); - local.push(LocalFingerprint::RerunIfEnvChanged { - var: var.clone(), - val, - }); - } - - local -} - -fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> { - debug_assert_ne!(fingerprint.rustc, 0); - // fingerprint::new().rustc == 0, make sure it doesn't make it to the file system. 
- // This is mostly so outside tools can reliably find out what rust version this file is for, - // as we can use the full hash. - let hash = fingerprint.hash_u64(); - debug!("write fingerprint ({:x}) : {}", hash, loc.display()); - paths::write(loc, util::to_hex(hash).as_bytes())?; - - let json = serde_json::to_string(fingerprint).unwrap(); - if cfg!(debug_assertions) { - let f: Fingerprint = serde_json::from_str(&json).unwrap(); - assert_eq!(f.hash_u64(), hash); - } - paths::write(&loc.with_extension("json"), json.as_bytes())?; - Ok(()) -} - -/// Prepare for work when a package starts to build -pub fn prepare_init(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> { - let new1 = cx.files().fingerprint_dir(unit); - - // Doc tests have no output, thus no fingerprint. - if !new1.exists() && !unit.mode.is_doc_test() { - paths::create_dir_all(&new1)?; - } - - Ok(()) -} - -/// Returns the location that the dep-info file will show up at for the `unit` -/// specified. -pub fn dep_info_loc(cx: &mut Context<'_, '_>, unit: &Unit) -> PathBuf { - cx.files().fingerprint_file_path(unit, "dep-") -} - -/// Returns an absolute path that target directory. -/// All paths are rewritten to be relative to this. -fn target_root(cx: &Context<'_, '_>) -> PathBuf { - cx.bcx.ws.target_dir().into_path_unlocked() -} - -fn compare_old_fingerprint( - loc: &Path, - new_fingerprint: &Fingerprint, - mtime_on_use: bool, -) -> CargoResult<()> { - let old_fingerprint_short = paths::read(loc)?; - - if mtime_on_use { - // update the mtime so other cleaners know we used it - let t = FileTime::from_system_time(SystemTime::now()); - debug!("mtime-on-use forcing {:?} to {}", loc, t); - paths::set_file_time_no_err(loc, t); - } - - let new_hash = new_fingerprint.hash_u64(); - - if util::to_hex(new_hash) == old_fingerprint_short && new_fingerprint.fs_status.up_to_date() { - return Ok(()); - } - - let old_fingerprint_json = paths::read(&loc.with_extension("json"))?; - let old_fingerprint: Fingerprint = serde_json::from_str(&old_fingerprint_json) - .with_context(|| internal("failed to deserialize json"))?; - // Fingerprint can be empty after a failed rebuild (see comment in prepare_target). - if !old_fingerprint_short.is_empty() { - debug_assert_eq!( - util::to_hex(old_fingerprint.hash_u64()), - old_fingerprint_short - ); - } - let result = new_fingerprint.compare(&old_fingerprint); - assert!(result.is_err()); - result -} - -fn log_compare(unit: &Unit, compare: &CargoResult<()>) { - let ce = match compare { - Ok(..) => return, - Err(e) => e, - }; - info!( - "fingerprint error for {}/{:?}/{:?}", - unit.pkg, unit.mode, unit.target, - ); - info!(" err: {:?}", ce); -} - -/// Parses Cargo's internal `EncodedDepInfo` structure that was previously -/// serialized to disk. -/// -/// Note that this is not rustc's `*.d` files. -/// -/// Also note that rustc's `*.d` files are translated to Cargo-specific -/// `EncodedDepInfo` files after compilations have finished in -/// `translate_dep_info`. -/// -/// Returns `None` if the file is corrupt or couldn't be read from disk. This -/// indicates that the crate should likely be rebuilt. 
-pub fn parse_dep_info( - pkg_root: &Path, - target_root: &Path, - dep_info: &Path, -) -> CargoResult> { - let data = match paths::read_bytes(dep_info) { - Ok(data) => data, - Err(_) => return Ok(None), - }; - let info = match EncodedDepInfo::parse(&data) { - Some(info) => info, - None => { - log::warn!("failed to parse cargo's dep-info at {:?}", dep_info); - return Ok(None); - } - }; - let mut ret = RustcDepInfo::default(); - ret.env = info.env; - for (ty, path) in info.files { - let path = match ty { - DepInfoPathType::PackageRootRelative => pkg_root.join(path), - // N.B. path might be absolute here in which case the join will have no effect - DepInfoPathType::TargetRootRelative => target_root.join(path), - }; - ret.files.push(path); - } - Ok(Some(ret)) -} - -fn pkg_fingerprint(bcx: &BuildContext<'_, '_>, pkg: &Package) -> CargoResult { - let source_id = pkg.package_id().source_id(); - let sources = bcx.packages.sources(); - - let source = sources - .get(source_id) - .ok_or_else(|| internal("missing package source"))?; - source.fingerprint(pkg) -} - -fn find_stale_file( - mtime_cache: &mut HashMap, - reference: &Path, - paths: I, -) -> Option -where - I: IntoIterator, - I::Item: AsRef, -{ - let reference_mtime = match paths::mtime(reference) { - Ok(mtime) => mtime, - Err(..) => return Some(StaleItem::MissingFile(reference.to_path_buf())), - }; - - for path in paths { - let path = path.as_ref(); - let path_mtime = match mtime_cache.entry(path.to_path_buf()) { - Entry::Occupied(o) => *o.get(), - Entry::Vacant(v) => { - let mtime = match paths::mtime_recursive(path) { - Ok(mtime) => mtime, - Err(..) => return Some(StaleItem::MissingFile(path.to_path_buf())), - }; - *v.insert(mtime) - } - }; - - // TODO: fix #5918. - // Note that equal mtimes should be considered "stale". For filesystems with - // not much timestamp precision like 1s this is would be a conservative approximation - // to handle the case where a file is modified within the same second after - // a build starts. We want to make sure that incremental rebuilds pick that up! - // - // For filesystems with nanosecond precision it's been seen in the wild that - // its "nanosecond precision" isn't really nanosecond-accurate. It turns out that - // kernels may cache the current time so files created at different times actually - // list the same nanosecond precision. Some digging on #5919 picked up that the - // kernel caches the current time between timer ticks, which could mean that if - // a file is updated at most 10ms after a build starts then Cargo may not - // pick up the build changes. - // - // All in all, an equality check here would be a conservative assumption that, - // if equal, files were changed just after a previous build finished. - // Unfortunately this became problematic when (in #6484) cargo switch to more accurately - // measuring the start time of builds. - if path_mtime <= reference_mtime { - continue; - } - - return Some(StaleItem::ChangedFile { - reference: reference.to_path_buf(), - reference_mtime, - stale: path.to_path_buf(), - stale_mtime: path_mtime, - }); - } - - debug!( - "all paths up-to-date relative to {:?} mtime={}", - reference, reference_mtime - ); - None -} - -enum DepInfoPathType { - // src/, e.g. src/lib.rs - PackageRootRelative, - // target/debug/deps/lib... - // or an absolute path /.../sysroot/... - TargetRootRelative, -} - -/// Parses the dep-info file coming out of rustc into a Cargo-specific format. 
-/// -/// This function will parse `rustc_dep_info` as a makefile-style dep info to -/// learn about the all files which a crate depends on. This is then -/// re-serialized into the `cargo_dep_info` path in a Cargo-specific format. -/// -/// The `pkg_root` argument here is the absolute path to the directory -/// containing `Cargo.toml` for this crate that was compiled. The paths listed -/// in the rustc dep-info file may or may not be absolute but we'll want to -/// consider all of them relative to the `root` specified. -/// -/// The `rustc_cwd` argument is the absolute path to the cwd of the compiler -/// when it was invoked. -/// -/// If the `allow_package` argument is true, then package-relative paths are -/// included. If it is false, then package-relative paths are skipped and -/// ignored (typically used for registry or git dependencies where we assume -/// the source never changes, and we don't want the cost of running `stat` on -/// all those files). See the module-level docs for the note about -/// `-Zbinary-dep-depinfo` for more details on why this is done. -/// -/// The serialized Cargo format will contain a list of files, all of which are -/// relative if they're under `root`. or absolute if they're elsewhere. -pub fn translate_dep_info( - rustc_dep_info: &Path, - cargo_dep_info: &Path, - rustc_cwd: &Path, - pkg_root: &Path, - target_root: &Path, - rustc_cmd: &ProcessBuilder, - allow_package: bool, -) -> CargoResult<()> { - let depinfo = parse_rustc_dep_info(rustc_dep_info)?; - - let target_root = target_root.canonicalize()?; - let pkg_root = pkg_root.canonicalize()?; - let mut on_disk_info = EncodedDepInfo::default(); - on_disk_info.env = depinfo.env; - - // This is a bit of a tricky statement, but here we're *removing* the - // dependency on environment variables that were defined specifically for - // the command itself. Environment variables returend by `get_envs` includes - // environment variables like: - // - // * `OUT_DIR` if applicable - // * env vars added by a build script, if any - // - // The general idea here is that the dep info file tells us what, when - // changed, should cause us to rebuild the crate. These environment - // variables are synthesized by Cargo and/or the build script, and the - // intention is that their values are tracked elsewhere for whether the - // crate needs to be rebuilt. - // - // For example a build script says when it needs to be rerun and otherwise - // it's assumed to produce the same output, so we're guaranteed that env - // vars defined by the build script will always be the same unless the build - // script itself reruns, in which case the crate will rerun anyway. - // - // For things like `OUT_DIR` it's a bit sketchy for now. Most of the time - // that's used for code generation but this is technically buggy where if - // you write a binary that does `println!("{}", env!("OUT_DIR"))` we won't - // recompile that if you move the target directory. Hopefully that's not too - // bad of an issue for now... - // - // This also includes `CARGO` since if the code is explicitly wanting to - // know that path, it should be rebuilt if it changes. The CARGO path is - // not tracked elsewhere in the fingerprint. - on_disk_info - .env - .retain(|(key, _)| !rustc_cmd.get_envs().contains_key(key) || key == CARGO_ENV); - - for file in depinfo.files { - // The path may be absolute or relative, canonical or not. Make sure - // it is canonicalized so we are comparing the same kinds of paths. 
- let abs_file = rustc_cwd.join(file); - // If canonicalization fails, just use the abs path. There is currently - // a bug where --remap-path-prefix is affecting .d files, causing them - // to point to non-existent paths. - let canon_file = abs_file.canonicalize().unwrap_or_else(|_| abs_file.clone()); - - let (ty, path) = if let Ok(stripped) = canon_file.strip_prefix(&target_root) { - (DepInfoPathType::TargetRootRelative, stripped) - } else if let Ok(stripped) = canon_file.strip_prefix(&pkg_root) { - if !allow_package { - continue; - } - (DepInfoPathType::PackageRootRelative, stripped) - } else { - // It's definitely not target root relative, but this is an absolute path (since it was - // joined to rustc_cwd) and as such re-joining it later to the target root will have no - // effect. - (DepInfoPathType::TargetRootRelative, &*abs_file) - }; - on_disk_info.files.push((ty, path.to_owned())); - } - paths::write(cargo_dep_info, on_disk_info.serialize()?)?; - Ok(()) -} - -#[derive(Default)] -pub struct RustcDepInfo { - /// The list of files that the main target in the dep-info file depends on. - pub files: Vec, - /// The list of environment variables we found that the rustc compilation - /// depends on. - /// - /// The first element of the pair is the name of the env var and the second - /// item is the value. `Some` means that the env var was set, and `None` - /// means that the env var wasn't actually set and the compilation depends - /// on it not being set. - pub env: Vec<(String, Option)>, -} - -// Same as `RustcDepInfo` except avoids absolute paths as much as possible to -// allow moving around the target directory. -// -// This is also stored in an optimized format to make parsing it fast because -// Cargo will read it for crates on all future compilations. -#[derive(Default)] -struct EncodedDepInfo { - files: Vec<(DepInfoPathType, PathBuf)>, - env: Vec<(String, Option)>, -} - -impl EncodedDepInfo { - fn parse(mut bytes: &[u8]) -> Option { - let bytes = &mut bytes; - let nfiles = read_usize(bytes)?; - let mut files = Vec::with_capacity(nfiles as usize); - for _ in 0..nfiles { - let ty = match read_u8(bytes)? { - 0 => DepInfoPathType::PackageRootRelative, - 1 => DepInfoPathType::TargetRootRelative, - _ => return None, - }; - let bytes = read_bytes(bytes)?; - files.push((ty, paths::bytes2path(bytes).ok()?)); - } - - let nenv = read_usize(bytes)?; - let mut env = Vec::with_capacity(nenv as usize); - for _ in 0..nenv { - let key = str::from_utf8(read_bytes(bytes)?).ok()?.to_string(); - let val = match read_u8(bytes)? { - 0 => None, - 1 => Some(str::from_utf8(read_bytes(bytes)?).ok()?.to_string()), - _ => return None, - }; - env.push((key, val)); - } - return Some(EncodedDepInfo { files, env }); - - fn read_usize(bytes: &mut &[u8]) -> Option { - let ret = bytes.get(..4)?; - *bytes = &bytes[4..]; - Some(u32::from_le_bytes(ret.try_into().unwrap()) as usize) - } - - fn read_u8(bytes: &mut &[u8]) -> Option { - let ret = *bytes.get(0)?; - *bytes = &bytes[1..]; - Some(ret) - } - - fn read_bytes<'a>(bytes: &mut &'a [u8]) -> Option<&'a [u8]> { - let n = read_usize(bytes)? 
as usize; - let ret = bytes.get(..n)?; - *bytes = &bytes[n..]; - Some(ret) - } - } - - fn serialize(&self) -> CargoResult> { - let mut ret = Vec::new(); - let dst = &mut ret; - write_usize(dst, self.files.len()); - for (ty, file) in self.files.iter() { - match ty { - DepInfoPathType::PackageRootRelative => dst.push(0), - DepInfoPathType::TargetRootRelative => dst.push(1), - } - write_bytes(dst, paths::path2bytes(file)?); - } - - write_usize(dst, self.env.len()); - for (key, val) in self.env.iter() { - write_bytes(dst, key); - match val { - None => dst.push(0), - Some(val) => { - dst.push(1); - write_bytes(dst, val); - } - } - } - return Ok(ret); - - fn write_bytes(dst: &mut Vec, val: impl AsRef<[u8]>) { - let val = val.as_ref(); - write_usize(dst, val.len()); - dst.extend_from_slice(val); - } - - fn write_usize(dst: &mut Vec, val: usize) { - dst.extend(&u32::to_le_bytes(val as u32)); - } - } -} - -/// Parse the `.d` dep-info file generated by rustc. -pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult { - let contents = paths::read(rustc_dep_info)?; - let mut ret = RustcDepInfo::default(); - let mut found_deps = false; - - for line in contents.lines() { - if let Some(rest) = line.strip_prefix("# env-dep:") { - let mut parts = rest.splitn(2, '='); - let env_var = match parts.next() { - Some(s) => s, - None => continue, - }; - let env_val = match parts.next() { - Some(s) => Some(unescape_env(s)?), - None => None, - }; - ret.env.push((unescape_env(env_var)?, env_val)); - } else if let Some(pos) = line.find(": ") { - if found_deps { - continue; - } - found_deps = true; - let mut deps = line[pos + 2..].split_whitespace(); - - while let Some(s) = deps.next() { - let mut file = s.to_string(); - while file.ends_with('\\') { - file.pop(); - file.push(' '); - file.push_str(deps.next().ok_or_else(|| { - internal("malformed dep-info format, trailing \\".to_string()) - })?); - } - ret.files.push(file.into()); - } - } - } - return Ok(ret); - - // rustc tries to fit env var names and values all on a single line, which - // means it needs to escape `\r` and `\n`. The escape syntax used is "\n" - // which means that `\` also needs to be escaped. - fn unescape_env(s: &str) -> CargoResult { - let mut ret = String::with_capacity(s.len()); - let mut chars = s.chars(); - while let Some(c) = chars.next() { - if c != '\\' { - ret.push(c); - continue; - } - match chars.next() { - Some('\\') => ret.push('\\'), - Some('n') => ret.push('\n'), - Some('r') => ret.push('\r'), - Some(c) => bail!("unknown escape character `{}`", c), - None => bail!("unterminated escape character"), - } - } - Ok(ret) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/future_incompat.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/future_incompat.rs deleted file mode 100644 index e13d331e0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/future_incompat.rs +++ /dev/null @@ -1,453 +0,0 @@ -//! Support for future-incompatible warning reporting. - -use crate::core::compiler::BuildContext; -use crate::core::{Dependency, PackageId, Workspace}; -use crate::sources::SourceConfigMap; -use crate::util::{iter_join, CargoResult, Config}; -use anyhow::{bail, format_err, Context}; -use serde::{Deserialize, Serialize}; -use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; -use std::fmt::Write as _; -use std::io::{Read, Write}; - -pub const REPORT_PREAMBLE: &str = "\ -The following warnings were discovered during the build. 
These warnings are an -indication that the packages contain code that will become an error in a -future release of Rust. These warnings typically cover changes to close -soundness problems, unintended or undocumented behavior, or critical problems -that cannot be fixed in a backwards-compatible fashion, and are not expected -to be in wide use. - -Each warning should contain a link for more information on what the warning -means and how to resolve it. -"; - -/// Current version of the on-disk format. -const ON_DISK_VERSION: u32 = 0; - -/// The future incompatibility report, emitted by the compiler as a JSON message. -#[derive(serde::Deserialize)] -pub struct FutureIncompatReport { - pub future_incompat_report: Vec, -} - -/// Structure used for collecting reports in-memory. -pub struct FutureIncompatReportPackage { - pub package_id: PackageId, - pub items: Vec, -} - -/// A single future-incompatible warning emitted by rustc. -#[derive(Serialize, Deserialize)] -pub struct FutureBreakageItem { - /// The date at which this lint will become an error. - /// Currently unused - pub future_breakage_date: Option, - /// The original diagnostic emitted by the compiler - pub diagnostic: Diagnostic, -} - -/// A diagnostic emitted by the compiler as a JSON message. -/// We only care about the 'rendered' field -#[derive(Serialize, Deserialize)] -pub struct Diagnostic { - pub rendered: String, - pub level: String, -} - -/// The filename in the top-level `target` directory where we store -/// the report -const FUTURE_INCOMPAT_FILE: &str = ".future-incompat-report.json"; -/// Max number of reports to save on disk. -const MAX_REPORTS: usize = 5; - -/// The structure saved to disk containing the reports. -#[derive(Serialize, Deserialize)] -pub struct OnDiskReports { - /// A schema version number, to handle older cargo's from trying to read - /// something that they don't understand. - version: u32, - /// The report ID to use for the next report to save. - next_id: u32, - /// Available reports. - reports: Vec, -} - -/// A single report for a given compilation session. -#[derive(Serialize, Deserialize)] -struct OnDiskReport { - /// Unique reference to the report for the `--id` CLI flag. - id: u32, - /// A message describing suggestions for fixing the - /// reported issues - suggestion_message: String, - /// Report, suitable for printing to the console. - /// Maps package names to the corresponding report - /// We use a `BTreeMap` so that the iteration order - /// is stable across multiple runs of `cargo` - per_package: BTreeMap, -} - -impl Default for OnDiskReports { - fn default() -> OnDiskReports { - OnDiskReports { - version: ON_DISK_VERSION, - next_id: 1, - reports: Vec::new(), - } - } -} - -impl OnDiskReports { - /// Saves a new report. 
- pub fn save_report( - mut self, - ws: &Workspace<'_>, - suggestion_message: String, - per_package_reports: &[FutureIncompatReportPackage], - ) { - let report = OnDiskReport { - id: self.next_id, - suggestion_message, - per_package: render_report(per_package_reports), - }; - self.next_id += 1; - self.reports.push(report); - if self.reports.len() > MAX_REPORTS { - self.reports.remove(0); - } - let on_disk = serde_json::to_vec(&self).unwrap(); - if let Err(e) = ws - .target_dir() - .open_rw( - FUTURE_INCOMPAT_FILE, - ws.config(), - "Future incompatibility report", - ) - .and_then(|file| { - let mut file = file.file(); - file.set_len(0)?; - file.write_all(&on_disk)?; - Ok(()) - }) - { - crate::display_warning_with_error( - "failed to write on-disk future incompatible report", - &e, - &mut ws.config().shell(), - ); - } - } - - /// Loads the on-disk reports. - pub fn load(ws: &Workspace<'_>) -> CargoResult { - let report_file = match ws.target_dir().open_ro( - FUTURE_INCOMPAT_FILE, - ws.config(), - "Future incompatible report", - ) { - Ok(r) => r, - Err(e) => { - if let Some(io_err) = e.downcast_ref::() { - if io_err.kind() == std::io::ErrorKind::NotFound { - bail!("no reports are currently available"); - } - } - return Err(e); - } - }; - - let mut file_contents = String::new(); - report_file - .file() - .read_to_string(&mut file_contents) - .with_context(|| "failed to read report")?; - let on_disk_reports: OnDiskReports = - serde_json::from_str(&file_contents).with_context(|| "failed to load report")?; - if on_disk_reports.version != ON_DISK_VERSION { - bail!("unable to read reports; reports were saved from a future version of Cargo"); - } - Ok(on_disk_reports) - } - - /// Returns the most recent report ID. - pub fn last_id(&self) -> u32 { - self.reports.last().map(|r| r.id).unwrap() - } - - pub fn get_report( - &self, - id: u32, - config: &Config, - package: Option<&str>, - ) -> CargoResult { - let report = self.reports.iter().find(|r| r.id == id).ok_or_else(|| { - let available = iter_join(self.reports.iter().map(|r| r.id.to_string()), ", "); - format_err!( - "could not find report with ID {}\n\ - Available IDs are: {}", - id, - available - ) - })?; - - let mut to_display = report.suggestion_message.clone(); - to_display += "\n"; - - let package_report = if let Some(package) = package { - report - .per_package - .get(package) - .ok_or_else(|| { - format_err!( - "could not find package with ID `{}`\n - Available packages are: {}\n - Omit the `--package` flag to display a report for all packages", - package, - iter_join(report.per_package.keys(), ", ") - ) - })? 
- .to_string() - } else { - report - .per_package - .values() - .cloned() - .collect::>() - .join("\n") - }; - to_display += &package_report; - - let shell = config.shell(); - - let to_display = if shell.err_supports_color() && shell.out_supports_color() { - to_display - } else { - strip_ansi_escapes::strip(&to_display) - .map(|v| String::from_utf8(v).expect("utf8")) - .expect("strip should never fail") - }; - Ok(to_display) - } -} - -fn render_report(per_package_reports: &[FutureIncompatReportPackage]) -> BTreeMap { - let mut report: BTreeMap = BTreeMap::new(); - for per_package in per_package_reports { - let package_spec = format!( - "{}:{}", - per_package.package_id.name(), - per_package.package_id.version() - ); - let rendered = report.entry(package_spec).or_default(); - rendered.push_str(&format!( - "The package `{}` currently triggers the following future incompatibility lints:\n", - per_package.package_id - )); - for item in &per_package.items { - rendered.extend( - item.diagnostic - .rendered - .lines() - .map(|l| format!("> {}\n", l)), - ); - } - } - report -} - -/// Returns a user-readable message explaining which of -/// the packages in `package_ids` have updates available. -/// This is best-effort - if an error occurs, `None` will be returned. -fn get_updates(ws: &Workspace<'_>, package_ids: &BTreeSet) -> Option { - // This in general ignores all errors since this is opportunistic. - let _lock = ws.config().acquire_package_cache_lock().ok()?; - // Create a set of updated registry sources. - let map = SourceConfigMap::new(ws.config()).ok()?; - let package_ids: BTreeSet<_> = package_ids - .iter() - .filter(|pkg_id| pkg_id.source_id().is_registry()) - .collect(); - let source_ids: HashSet<_> = package_ids - .iter() - .map(|pkg_id| pkg_id.source_id()) - .collect(); - let mut sources: HashMap<_, _> = source_ids - .into_iter() - .filter_map(|sid| { - let source = map.load(sid, &HashSet::new()).ok()?; - Some((sid, source)) - }) - .collect(); - // Query the sources for new versions. - let mut updates = String::new(); - for pkg_id in package_ids { - let source = match sources.get_mut(&pkg_id.source_id()) { - Some(s) => s, - None => continue, - }; - let dep = Dependency::parse(pkg_id.name(), None, pkg_id.source_id()).ok()?; - let summaries = source.query_vec(&dep).ok()?; - let mut updated_versions: Vec<_> = summaries - .iter() - .map(|summary| summary.version()) - .filter(|version| *version > pkg_id.version()) - .collect(); - updated_versions.sort(); - - let updated_versions = iter_join( - updated_versions - .into_iter() - .map(|version| version.to_string()), - ", ", - ); - - if !updated_versions.is_empty() { - writeln!( - updates, - "{} has the following newer versions available: {}", - pkg_id, updated_versions - ) - .unwrap(); - } - } - Some(updates) -} - -/// Writes a future-incompat report to disk, using the per-package -/// reports gathered during the build. If requested by the user, -/// a message is also displayed in the build output. 
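// Editorial sketch (not part of the deleted cargo source): the "newer versions
// available" filtering that `get_updates` above performs with semver types,
// shown here with a hand-rolled `major.minor.patch` parser so the example has
// no dependencies. All names are illustrative only.
fn parse_ver(s: &str) -> Option<(u64, u64, u64)> {
    let mut it = s.split('.').map(|p| p.parse::<u64>().ok());
    Some((it.next()??, it.next()??, it.next()??))
}

fn newer_versions(current: &str, available: &[&str]) -> Vec<String> {
    let cur = match parse_ver(current) {
        Some(v) => v,
        None => return Vec::new(),
    };
    // Keep only versions strictly greater than the one in use, sorted ascending.
    let mut newer: Vec<((u64, u64, u64), String)> = available
        .iter()
        .filter_map(|s| parse_ver(s).map(|v| (v, s.to_string())))
        .filter(|(v, _)| *v > cur)
        .collect();
    newer.sort();
    newer.into_iter().map(|(_, s)| s).collect()
}

fn main() {
    assert_eq!(
        newer_versions("1.2.0", &["1.1.9", "1.2.1", "2.0.0"]),
        vec!["1.2.1", "2.0.0"]
    );
}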
-pub fn save_and_display_report( - bcx: &BuildContext<'_, '_>, - per_package_future_incompat_reports: &[FutureIncompatReportPackage], -) { - let should_display_message = match bcx.config.future_incompat_config() { - Ok(config) => config.should_display_message(), - Err(e) => { - crate::display_warning_with_error( - "failed to read future-incompat config from disk", - &e, - &mut bcx.config.shell(), - ); - true - } - }; - - if per_package_future_incompat_reports.is_empty() { - // Explicitly passing a command-line flag overrides - // `should_display_message` from the config file - if bcx.build_config.future_incompat_report { - drop( - bcx.config - .shell() - .note("0 dependencies had future-incompatible warnings"), - ); - } - return; - } - - let current_reports = match OnDiskReports::load(bcx.ws) { - Ok(r) => r, - Err(e) => { - log::debug!( - "saving future-incompatible reports failed to load current reports: {:?}", - e - ); - OnDiskReports::default() - } - }; - let report_id = current_reports.next_id; - - // Get a list of unique and sorted package name/versions. - let package_ids: BTreeSet<_> = per_package_future_incompat_reports - .iter() - .map(|r| r.package_id) - .collect(); - let package_vers: Vec<_> = package_ids.iter().map(|pid| pid.to_string()).collect(); - - if should_display_message || bcx.build_config.future_incompat_report { - drop(bcx.config.shell().warn(&format!( - "the following packages contain code that will be rejected by a future \ - version of Rust: {}", - package_vers.join(", ") - ))); - } - - let updated_versions = get_updates(bcx.ws, &package_ids).unwrap_or(String::new()); - - let update_message = if !updated_versions.is_empty() { - format!( - " -- Some affected dependencies have newer versions available. -You may want to consider updating them to a newer version to see if the issue has been fixed. - -{updated_versions}\n", - updated_versions = updated_versions - ) - } else { - String::new() - }; - - let upstream_info = package_ids - .iter() - .map(|package_id| { - let manifest = bcx.packages.get_one(*package_id).unwrap().manifest(); - format!( - " - - {name} - - Repository: {url} - - Detailed warning command: `cargo report future-incompatibilities --id {id} --package {name}`", - name = format!("{}:{}", package_id.name(), package_id.version()), - url = manifest - .metadata() - .repository - .as_deref() - .unwrap_or(""), - id = report_id, - ) - }) - .collect::>() - .join("\n"); - - let suggestion_message = format!( - " -To solve this problem, you can try the following approaches: - -{update_message} -- If the issue is not solved by updating the dependencies, a fix has to be -implemented by those dependencies. You can help with that by notifying the -maintainers of this problem (e.g. by creating a bug report) or by proposing a -fix to the maintainers (e.g. by creating a pull request): -{upstream_info} - -- If waiting for an upstream fix is not an option, you can use the `[patch]` -section in `Cargo.toml` to use your own version of the dependency. 
For more -information, see: -https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html#the-patch-section - ", - upstream_info = upstream_info, - update_message = update_message, - ); - - current_reports.save_report( - bcx.ws, - suggestion_message.clone(), - per_package_future_incompat_reports, - ); - - if bcx.build_config.future_incompat_report { - drop(bcx.config.shell().note(&suggestion_message)); - drop(bcx.config.shell().note(&format!( - "this report can be shown with `cargo report \ - future-incompatibilities --id {}`", - report_id - ))); - } else if should_display_message { - drop(bcx.config.shell().note(&format!( - "to see what the problems were, use the option \ - `--future-incompat-report`, or run `cargo report \ - future-incompatibilities --id {}`", - report_id - ))); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/job.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/job.rs deleted file mode 100644 index b80b85066..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/job.rs +++ /dev/null @@ -1,92 +0,0 @@ -use std::fmt; -use std::mem; - -use super::job_queue::JobState; -use crate::util::CargoResult; - -pub struct Job { - work: Work, - fresh: Freshness, -} - -/// Each proc should send its description before starting. -/// It should send either once or close immediately. -pub struct Work { - inner: Box) -> CargoResult<()> + Send>, -} - -impl Work { - pub fn new(f: F) -> Work - where - F: FnOnce(&JobState<'_, '_>) -> CargoResult<()> + Send + 'static, - { - Work { inner: Box::new(f) } - } - - pub fn noop() -> Work { - Work::new(|_| Ok(())) - } - - pub fn call(self, tx: &JobState<'_, '_>) -> CargoResult<()> { - (self.inner)(tx) - } - - pub fn then(self, next: Work) -> Work { - Work::new(move |state| { - self.call(state)?; - next.call(state) - }) - } -} - -impl Job { - /// Creates a new job that does nothing. - pub fn new_fresh() -> Job { - Job { - work: Work::noop(), - fresh: Freshness::Fresh, - } - } - - /// Creates a new job representing a unit of work. - pub fn new_dirty(work: Work) -> Job { - Job { - work, - fresh: Freshness::Dirty, - } - } - - /// Consumes this job by running it, returning the result of the - /// computation. - pub fn run(self, state: &JobState<'_, '_>) -> CargoResult<()> { - self.work.call(state) - } - - /// Returns whether this job was fresh/dirty, where "fresh" means we're - /// likely to perform just some small bookkeeping where "dirty" means we'll - /// probably do something slow like invoke rustc. - pub fn freshness(&self) -> Freshness { - self.fresh - } - - pub fn before(&mut self, next: Work) { - let prev = mem::replace(&mut self.work, Work::noop()); - self.work = next.then(prev); - } -} - -impl fmt::Debug for Job { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Job {{ ... }}") - } -} - -/// Indication of the freshness of a package. -/// -/// A fresh package does not necessarily need to be rebuilt (unless a dependency -/// was also rebuilt), and a dirty package must always be rebuilt. -#[derive(PartialEq, Eq, Debug, Clone, Copy)] -pub enum Freshness { - Fresh, - Dirty, -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/job_queue.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/job_queue.rs deleted file mode 100644 index 322205d6e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/job_queue.rs +++ /dev/null @@ -1,1218 +0,0 @@ -//! 
This module implements the job queue which determines the ordering in which -//! rustc is spawned off. It also manages the allocation of jobserver tokens to -//! rustc beyond the implicit token each rustc owns (i.e., the ones used for -//! parallel LLVM work and parallel rustc threads). -//! -//! Cargo and rustc have a somewhat non-trivial jobserver relationship with each -//! other, which is due to scaling issues with sharing a single jobserver -//! amongst what is potentially hundreds of threads of work on many-cored -//! systems on (at least) linux, and likely other platforms as well. -//! -//! The details of this algorithm are (also) written out in -//! src/librustc_jobserver/lib.rs. What follows is a description focusing on the -//! Cargo side of things. -//! -//! Cargo wants to complete the build as quickly as possible, fully saturating -//! all cores (as constrained by the -j=N) parameter. Cargo also must not spawn -//! more than N threads of work: the total amount of tokens we have floating -//! around must always be limited to N. -//! -//! It is not really possible to optimally choose which crate should build first -//! or last; nor is it possible to decide whether to give an additional token to -//! rustc first or rather spawn a new crate of work. For now, the algorithm we -//! implement prioritizes spawning as many crates (i.e., rustc processes) as -//! possible, and then filling each rustc with tokens on demand. -//! -//! The primary loop is in `drain_the_queue` below. -//! -//! We integrate with the jobserver, originating from GNU make, to make sure -//! that build scripts which use make to build C code can cooperate with us on -//! the number of used tokens and avoid overfilling the system we're on. -//! -//! The jobserver is unfortunately a very simple protocol, so we enhance it a -//! little when we know that there is a rustc on the other end. Via the stderr -//! pipe we have to rustc, we get messages such as "NeedsToken" and -//! "ReleaseToken" from rustc. -//! -//! "NeedsToken" indicates that a rustc is interested in acquiring a token, but -//! never that it would be impossible to make progress without one (i.e., it -//! would be incorrect for rustc to not terminate due to an unfulfilled -//! NeedsToken request); we do not usually fulfill all NeedsToken requests for a -//! given rustc. -//! -//! "ReleaseToken" indicates that a rustc is done with one of its tokens and is -//! ready for us to re-acquire ownership -- we will either release that token -//! back into the general pool or reuse it ourselves. Note that rustc will -//! inform us that it is releasing a token even if it itself is also requesting -//! tokens; is is up to us whether to return the token to that same rustc. -//! -//! The current scheduling algorithm is relatively primitive and could likely be -//! improved. 
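// Editorial sketch (not part of the deleted cargo source): a toy model of the
// token accounting the module docs above describe. A channel pre-loaded with
// N tokens plays the role of the jobserver pool; every unit of "rustc" work
// must hold a token while it runs. This is a simplification, not cargo's code.
use std::sync::{mpsc, Arc, Mutex};
use std::thread;

fn main() {
    let jobs = 4; // like `-j 4`: at most 4 units of work in flight at once
    let (tx, rx) = mpsc::channel::<()>();
    for _ in 0..jobs {
        tx.send(()).unwrap(); // pre-load the pool
    }
    let rx = Arc::new(Mutex::new(rx));

    let mut handles = Vec::new();
    for unit in 0..10 {
        let (tx, rx) = (tx.clone(), rx.clone());
        handles.push(thread::spawn(move || {
            // Block until a token is available before starting the unit.
            let _token = rx.lock().unwrap().recv().unwrap();
            println!("compiling unit {}", unit);
            // Returning the token lets the next queued unit start.
            tx.send(()).unwrap();
        }));
    }
    for h in handles {
        h.join().unwrap();
    }
}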
- -use std::cell::{Cell, RefCell}; -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::fmt::Write as _; -use std::io; -use std::marker; -use std::sync::Arc; -use std::time::Duration; - -use anyhow::{format_err, Context as _}; -use cargo_util::ProcessBuilder; -use crossbeam_utils::thread::Scope; -use jobserver::{Acquired, Client, HelperThread}; -use log::{debug, info, trace}; -use semver::Version; - -use super::context::OutputFile; -use super::job::{ - Freshness::{self, Dirty, Fresh}, - Job, -}; -use super::timings::Timings; -use super::{BuildContext, BuildPlan, CompileMode, Context, Unit}; -use crate::core::compiler::future_incompat::{ - self, FutureBreakageItem, FutureIncompatReportPackage, -}; -use crate::core::resolver::ResolveBehavior; -use crate::core::{PackageId, Shell, TargetKind}; -use crate::util::diagnostic_server::{self, DiagnosticPrinter}; -use crate::util::machine_message::{self, Message as _}; -use crate::util::CargoResult; -use crate::util::{self, internal, profile}; -use crate::util::{Config, DependencyQueue, Progress, ProgressStyle, Queue}; - -/// This structure is backed by the `DependencyQueue` type and manages the -/// queueing of compilation steps for each package. Packages enqueue units of -/// work and then later on the entire graph is converted to DrainState and -/// executed. -pub struct JobQueue<'cfg> { - queue: DependencyQueue, - counts: HashMap, - timings: Timings<'cfg>, -} - -/// This structure is backed by the `DependencyQueue` type and manages the -/// actual compilation step of each package. Packages enqueue units of work and -/// then later on the entire graph is processed and compiled. -/// -/// It is created from JobQueue when we have fully assembled the crate graph -/// (i.e., all package dependencies are known). -/// -/// # Message queue -/// -/// Each thread running a process uses the message queue to send messages back -/// to the main thread. The main thread coordinates everything, and handles -/// printing output. -/// -/// It is important to be careful which messages use `push` vs `push_bounded`. -/// `push` is for priority messages (like tokens, or "finished") where the -/// sender shouldn't block. We want to handle those so real work can proceed -/// ASAP. -/// -/// `push_bounded` is only for messages being printed to stdout/stderr. Being -/// bounded prevents a flood of messages causing a large amount of memory -/// being used. -/// -/// `push` also avoids blocking which helps avoid deadlocks. For example, when -/// the diagnostic server thread is dropped, it waits for the thread to exit. -/// But if the thread is blocked on a full queue, and there is a critical -/// error, the drop will deadlock. This should be fixed at some point in the -/// future. The jobserver thread has a similar problem, though it will time -/// out after 1 second. -struct DrainState<'cfg> { - // This is the length of the DependencyQueue when starting out - total_units: usize, - - queue: DependencyQueue, - messages: Arc>, - /// Diagnostic deduplication support. - diag_dedupe: DiagDedupe<'cfg>, - /// Count of warnings, used to print a summary after the job succeeds. - /// - /// First value is the total number of warnings, and the second value is - /// the number that were suppressed because they were duplicates of a - /// previous warning. 
- warning_count: HashMap, - active: HashMap, - compiled: HashSet, - documented: HashSet, - counts: HashMap, - progress: Progress<'cfg>, - next_id: u32, - timings: Timings<'cfg>, - - /// Tokens that are currently owned by this Cargo, and may be "associated" - /// with a rustc process. They may also be unused, though if so will be - /// dropped on the next loop iteration. - /// - /// Note that the length of this may be zero, but we will still spawn work, - /// as we share the implicit token given to this Cargo process with a - /// single rustc process. - tokens: Vec, - - /// rustc per-thread tokens, when in jobserver-per-rustc mode. - rustc_tokens: HashMap>, - - /// This represents the list of rustc jobs (processes) and associated - /// clients that are interested in receiving a token. - to_send_clients: BTreeMap>, - - /// The list of jobs that we have not yet started executing, but have - /// retrieved from the `queue`. We eagerly pull jobs off the main queue to - /// allow us to request jobserver tokens pretty early. - pending_queue: Vec<(Unit, Job)>, - print: DiagnosticPrinter<'cfg>, - - /// How many jobs we've finished - finished: usize, - per_package_future_incompat_reports: Vec, -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] -pub struct JobId(pub u32); - -impl std::fmt::Display for JobId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} - -/// A `JobState` is constructed by `JobQueue::run` and passed to `Job::run`. It includes everything -/// necessary to communicate between the main thread and the execution of the job. -/// -/// The job may execute on either a dedicated thread or the main thread. If the job executes on the -/// main thread, the `output` field must be set to prevent a deadlock. -pub struct JobState<'a, 'cfg> { - /// Channel back to the main thread to coordinate messages and such. - /// - /// When the `output` field is `Some`, care must be taken to avoid calling `push_bounded` on - /// the message queue to prevent a deadlock. - messages: Arc>, - - /// Normally output is sent to the job queue with backpressure. When the job is fresh - /// however we need to immediately display the output to prevent a deadlock as the - /// output messages are processed on the same thread as they are sent from. `output` - /// defines where to output in this case. - /// - /// Currently the `Shell` inside `Config` is wrapped in a `RefCell` and thus can't be passed - /// between threads. This means that it isn't possible for multiple output messages to be - /// interleaved. In the future, it may be wrapped in a `Mutex` instead. In this case - /// interleaving is still prevented as the lock would be held for the whole printing of an - /// output message. - output: Option<&'a DiagDedupe<'cfg>>, - - /// The job id that this state is associated with, used when sending - /// messages back to the main thread. - id: JobId, - - /// Whether or not we're expected to have a call to `rmeta_produced`. Once - /// that method is called this is dynamically set to `false` to prevent - /// sending a double message later on. - rmeta_required: Cell, - - // Historical versions of Cargo made use of the `'a` argument here, so to - // leave the door open to future refactorings keep it here. - _marker: marker::PhantomData<&'a ()>, -} - -/// Handler for deduplicating diagnostics. 
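// Editorial sketch (not part of the deleted cargo source): the hash-based
// duplicate suppression that `DiagDedupe::emit_diag` below performs, with
// std's DefaultHasher standing in for cargo's `util::hash_u64`.
use std::collections::hash_map::DefaultHasher;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

struct Dedupe {
    seen: HashSet<u64>,
}

impl Dedupe {
    /// Returns `true` if the diagnostic was printed, `false` if suppressed.
    fn emit(&mut self, diag: &str) -> bool {
        let mut h = DefaultHasher::new();
        diag.hash(&mut h);
        // `insert` returns `false` when the hash was already seen, i.e. the
        // same rendered diagnostic was emitted before.
        if !self.seen.insert(h.finish()) {
            return false;
        }
        eprintln!("{}", diag);
        true
    }
}

fn main() {
    let mut dedupe = Dedupe { seen: HashSet::new() };
    assert!(dedupe.emit("warning: unused variable `x`"));
    assert!(!dedupe.emit("warning: unused variable `x`")); // duplicate, suppressed
}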
-struct DiagDedupe<'cfg> { - seen: RefCell>, - config: &'cfg Config, -} - -impl<'cfg> DiagDedupe<'cfg> { - fn new(config: &'cfg Config) -> Self { - DiagDedupe { - seen: RefCell::new(HashSet::new()), - config, - } - } - - /// Emits a diagnostic message. - /// - /// Returns `true` if the message was emitted, or `false` if it was - /// suppressed for being a duplicate. - fn emit_diag(&self, diag: &str) -> CargoResult { - let h = util::hash_u64(diag); - if !self.seen.borrow_mut().insert(h) { - return Ok(false); - } - let mut shell = self.config.shell(); - shell.print_ansi_stderr(diag.as_bytes())?; - shell.err().write_all(b"\n")?; - Ok(true) - } -} - -/// Possible artifacts that can be produced by compilations, used as edge values -/// in the dependency graph. -/// -/// As edge values we can have multiple kinds of edges depending on one node, -/// for example some units may only depend on the metadata for an rlib while -/// others depend on the full rlib. This `Artifact` enum is used to distinguish -/// this case and track the progress of compilations as they proceed. -#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] -enum Artifact { - /// A generic placeholder for "depends on everything run by a step" and - /// means that we can't start the next compilation until the previous has - /// finished entirely. - All, - - /// A node indicating that we only depend on the metadata of a compilation, - /// but the compilation is typically also producing an rlib. We can start - /// our step, however, before the full rlib is available. - Metadata, -} - -enum Message { - Run(JobId, String), - BuildPlanMsg(String, ProcessBuilder, Arc>), - Stdout(String), - Stderr(String), - Diagnostic { - id: JobId, - level: String, - diag: String, - }, - WarningCount { - id: JobId, - emitted: bool, - }, - FixDiagnostic(diagnostic_server::Message), - Token(io::Result), - Finish(JobId, Artifact, CargoResult<()>), - FutureIncompatReport(JobId, Vec), - - // This client should get release_raw called on it with one of our tokens - NeedsToken(JobId), - - // A token previously passed to a NeedsToken client is being released. - ReleaseToken(JobId), -} - -impl<'a, 'cfg> JobState<'a, 'cfg> { - pub fn running(&self, cmd: &ProcessBuilder) { - self.messages.push(Message::Run(self.id, cmd.to_string())); - } - - pub fn build_plan( - &self, - module_name: String, - cmd: ProcessBuilder, - filenames: Arc>, - ) { - self.messages - .push(Message::BuildPlanMsg(module_name, cmd, filenames)); - } - - pub fn stdout(&self, stdout: String) -> CargoResult<()> { - if let Some(dedupe) = self.output { - writeln!(dedupe.config.shell().out(), "{}", stdout)?; - } else { - self.messages.push_bounded(Message::Stdout(stdout)); - } - Ok(()) - } - - pub fn stderr(&self, stderr: String) -> CargoResult<()> { - if let Some(dedupe) = self.output { - let mut shell = dedupe.config.shell(); - shell.print_ansi_stderr(stderr.as_bytes())?; - shell.err().write_all(b"\n")?; - } else { - self.messages.push_bounded(Message::Stderr(stderr)); - } - Ok(()) - } - - pub fn emit_diag(&self, level: String, diag: String) -> CargoResult<()> { - if let Some(dedupe) = self.output { - let emitted = dedupe.emit_diag(&diag)?; - if level == "warning" { - self.messages.push(Message::WarningCount { - id: self.id, - emitted, - }); - } - } else { - self.messages.push_bounded(Message::Diagnostic { - id: self.id, - level, - diag, - }); - } - Ok(()) - } - - /// A method used to signal to the coordinator thread that the rmeta file - /// for an rlib has been produced. 
This is only called for some rmeta - /// builds when required, and can be called at any time before a job ends. - /// This should only be called once because a metadata file can only be - /// produced once! - pub fn rmeta_produced(&self) { - self.rmeta_required.set(false); - self.messages - .push(Message::Finish(self.id, Artifact::Metadata, Ok(()))); - } - - pub fn future_incompat_report(&self, report: Vec) { - self.messages - .push(Message::FutureIncompatReport(self.id, report)); - } - - /// The rustc underlying this Job is about to acquire a jobserver token (i.e., block) - /// on the passed client. - /// - /// This should arrange for the associated client to eventually get a token via - /// `client.release_raw()`. - pub fn will_acquire(&self) { - self.messages.push(Message::NeedsToken(self.id)); - } - - /// The rustc underlying this Job is informing us that it is done with a jobserver token. - /// - /// Note that it does *not* write that token back anywhere. - pub fn release_token(&self) { - self.messages.push(Message::ReleaseToken(self.id)); - } -} - -impl<'cfg> JobQueue<'cfg> { - pub fn new(bcx: &BuildContext<'_, 'cfg>) -> JobQueue<'cfg> { - JobQueue { - queue: DependencyQueue::new(), - counts: HashMap::new(), - timings: Timings::new(bcx, &bcx.roots), - } - } - - pub fn enqueue(&mut self, cx: &Context<'_, 'cfg>, unit: &Unit, job: Job) -> CargoResult<()> { - let dependencies = cx.unit_deps(unit); - let mut queue_deps = dependencies - .iter() - .filter(|dep| { - // Binaries aren't actually needed to *compile* tests, just to run - // them, so we don't include this dependency edge in the job graph. - !dep.unit.target.is_test() && !dep.unit.target.is_bin() - }) - .map(|dep| { - // Handle the case here where our `unit -> dep` dependency may - // only require the metadata, not the full compilation to - // finish. Use the tables in `cx` to figure out what kind - // of artifact is associated with this dependency. - let artifact = if cx.only_requires_rmeta(unit, &dep.unit) { - Artifact::Metadata - } else { - Artifact::All - }; - (dep.unit.clone(), artifact) - }) - .collect::>(); - - // This is somewhat tricky, but we may need to synthesize some - // dependencies for this target if it requires full upstream - // compilations to have completed. If we're in pipelining mode then some - // dependency edges may be `Metadata` due to the above clause (as - // opposed to everything being `All`). For example consider: - // - // a (binary) - // โ”” b (lib) - // โ”” c (lib) - // - // Here the dependency edge from B to C will be `Metadata`, and the - // dependency edge from A to B will be `All`. For A to be compiled, - // however, it currently actually needs the full rlib of C. This means - // that we need to synthesize a dependency edge for the dependency graph - // from A to C. That's done here. - // - // This will walk all dependencies of the current target, and if any of - // *their* dependencies are `Metadata` then we depend on the `All` of - // the target as well. This should ensure that edges changed to - // `Metadata` propagate upwards `All` dependencies to anything that - // transitively contains the `Metadata` edge. 
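// Editorial sketch (not part of the deleted cargo source): the edge synthesis
// described in the comment above, run on the toy graph it mentions
// (a -> b -> c). The plain string graph is illustrative, not cargo's
// Unit/Artifact types.
use std::collections::{HashMap, HashSet};

fn add_transitive_deps<'a>(
    graph: &HashMap<&'a str, Vec<&'a str>>,
    out: &mut HashSet<&'a str>,
    node: &'a str,
) {
    for &dep in graph.get(node).into_iter().flatten() {
        if out.insert(dep) {
            add_transitive_deps(graph, out, dep);
        }
    }
}

fn main() {
    let mut graph = HashMap::new();
    graph.insert("a", vec!["b"]); // a (binary) depends on b (lib)
    graph.insert("b", vec!["c"]); // b -> c would only be a `Metadata` edge
    graph.insert("c", vec![]);

    // `a` needs full rlibs, so it must also wait on everything below `b`,
    // which is the synthesized a -> c dependency the comment talks about.
    let mut full_deps = HashSet::new();
    add_transitive_deps(&graph, &mut full_deps, "a");
    assert!(full_deps.contains("c"));
    println!("a waits for full artifacts of: {:?}", full_deps);
}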
- if unit.requires_upstream_objects() { - for dep in dependencies { - depend_on_deps_of_deps(cx, &mut queue_deps, dep.unit.clone()); - } - - fn depend_on_deps_of_deps( - cx: &Context<'_, '_>, - deps: &mut HashMap, - unit: Unit, - ) { - for dep in cx.unit_deps(&unit) { - if deps.insert(dep.unit.clone(), Artifact::All).is_none() { - depend_on_deps_of_deps(cx, deps, dep.unit.clone()); - } - } - } - } - - // For now we use a fixed placeholder value for the cost of each unit, but - // in the future this could be used to allow users to provide hints about - // relative expected costs of units, or this could be automatically set in - // a smarter way using timing data from a previous compilation. - self.queue.queue(unit.clone(), job, queue_deps, 100); - *self.counts.entry(unit.pkg.package_id()).or_insert(0) += 1; - Ok(()) - } - - /// Executes all jobs necessary to build the dependency graph. - /// - /// This function will spawn off `config.jobs()` workers to build all of the - /// necessary dependencies, in order. Freshness is propagated as far as - /// possible along each dependency chain. - pub fn execute(mut self, cx: &mut Context<'_, '_>, plan: &mut BuildPlan) -> CargoResult<()> { - let _p = profile::start("executing the job graph"); - self.queue.queue_finished(); - - let progress = Progress::with_style("Building", ProgressStyle::Ratio, cx.bcx.config); - let state = DrainState { - total_units: self.queue.len(), - queue: self.queue, - // 100 here is somewhat arbitrary. It is a few screenfulls of - // output, and hopefully at most a few megabytes of memory for - // typical messages. If you change this, please update the test - // caching_large_output, too. - messages: Arc::new(Queue::new(100)), - diag_dedupe: DiagDedupe::new(cx.bcx.config), - warning_count: HashMap::new(), - active: HashMap::new(), - compiled: HashSet::new(), - documented: HashSet::new(), - counts: self.counts, - progress, - next_id: 0, - timings: self.timings, - tokens: Vec::new(), - rustc_tokens: HashMap::new(), - to_send_clients: BTreeMap::new(), - pending_queue: Vec::new(), - print: DiagnosticPrinter::new(cx.bcx.config), - finished: 0, - per_package_future_incompat_reports: Vec::new(), - }; - - // Create a helper thread for acquiring jobserver tokens - let messages = state.messages.clone(); - let helper = cx - .jobserver - .clone() - .into_helper_thread(move |token| { - messages.push(Message::Token(token)); - }) - .with_context(|| "failed to create helper thread for jobserver management")?; - - // Create a helper thread to manage the diagnostics for rustfix if - // necessary. - let messages = state.messages.clone(); - // It is important that this uses `push` instead of `push_bounded` for - // now. If someone wants to fix this to be bounded, the `drop` - // implementation needs to be changed to avoid possible deadlocks. - let _diagnostic_server = cx - .bcx - .build_config - .rustfix_diagnostic_server - .borrow_mut() - .take() - .map(move |srv| srv.start(move |msg| messages.push(Message::FixDiagnostic(msg)))); - - crossbeam_utils::thread::scope(move |scope| { - match state.drain_the_queue(cx, plan, scope, &helper) { - Some(err) => Err(err), - None => Ok(()), - } - }) - .expect("child threads shouldn't panic") - } -} - -impl<'cfg> DrainState<'cfg> { - fn spawn_work_if_possible( - &mut self, - cx: &mut Context<'_, '_>, - jobserver_helper: &HelperThread, - scope: &Scope<'_>, - ) -> CargoResult<()> { - // Dequeue as much work as we can, learning about everything - // possible that can run. 
Note that this is also the point where we - // start requesting job tokens. Each job after the first needs to - // request a token. - while let Some((unit, job)) = self.queue.dequeue() { - self.pending_queue.push((unit, job)); - if self.active.len() + self.pending_queue.len() > 1 { - jobserver_helper.request_token(); - } - } - - // Now that we've learned of all possible work that we can execute - // try to spawn it so long as we've got a jobserver token which says - // we're able to perform some parallel work. - while self.has_extra_tokens() && !self.pending_queue.is_empty() { - let (unit, job) = self.pending_queue.remove(0); - *self.counts.get_mut(&unit.pkg.package_id()).unwrap() -= 1; - if !cx.bcx.build_config.build_plan { - // Print out some nice progress information. - // NOTE: An error here will drop the job without starting it. - // That should be OK, since we want to exit as soon as - // possible during an error. - self.note_working_on(cx.bcx.config, &unit, job.freshness())?; - } - self.run(&unit, job, cx, scope); - } - - Ok(()) - } - - fn has_extra_tokens(&self) -> bool { - self.active.len() < self.tokens.len() + 1 - } - - // The oldest job (i.e., least job ID) is the one we grant tokens to first. - fn pop_waiting_client(&mut self) -> (JobId, Client) { - // FIXME: replace this with BTreeMap::first_entry when that stabilizes. - let key = *self - .to_send_clients - .keys() - .next() - .expect("at least one waiter"); - let clients = self.to_send_clients.get_mut(&key).unwrap(); - let client = clients.pop().unwrap(); - if clients.is_empty() { - self.to_send_clients.remove(&key); - } - (key, client) - } - - // If we managed to acquire some extra tokens, send them off to a waiting rustc. - fn grant_rustc_token_requests(&mut self) -> CargoResult<()> { - while !self.to_send_clients.is_empty() && self.has_extra_tokens() { - let (id, client) = self.pop_waiting_client(); - // This unwrap is guaranteed to succeed. `active` must be at least - // length 1, as otherwise there can't be a client waiting to be sent - // on, so tokens.len() must also be at least one. - let token = self.tokens.pop().unwrap(); - self.rustc_tokens - .entry(id) - .or_insert_with(Vec::new) - .push(token); - client - .release_raw() - .with_context(|| "failed to release jobserver token")?; - } - - Ok(()) - } - - fn handle_event( - &mut self, - cx: &mut Context<'_, '_>, - jobserver_helper: &HelperThread, - plan: &mut BuildPlan, - event: Message, - ) -> CargoResult<()> { - match event { - Message::Run(id, cmd) => { - cx.bcx - .config - .shell() - .verbose(|c| c.status("Running", &cmd))?; - self.timings.unit_start(id, self.active[&id].clone()); - } - Message::BuildPlanMsg(module_name, cmd, filenames) => { - plan.update(&module_name, &cmd, &filenames)?; - } - Message::Stdout(out) => { - writeln!(cx.bcx.config.shell().out(), "{}", out)?; - } - Message::Stderr(err) => { - let mut shell = cx.bcx.config.shell(); - shell.print_ansi_stderr(err.as_bytes())?; - shell.err().write_all(b"\n")?; - } - Message::Diagnostic { id, level, diag } => { - let emitted = self.diag_dedupe.emit_diag(&diag)?; - if level == "warning" { - self.bump_warning_count(id, emitted); - } - } - Message::WarningCount { id, emitted } => { - self.bump_warning_count(id, emitted); - } - Message::FixDiagnostic(msg) => { - self.print.print(&msg)?; - } - Message::Finish(id, artifact, result) => { - let unit = match artifact { - // If `id` has completely finished we remove it - // from the `active` map ... 
- Artifact::All => { - info!("end: {:?}", id); - self.finished += 1; - if let Some(rustc_tokens) = self.rustc_tokens.remove(&id) { - // This puts back the tokens that this rustc - // acquired into our primary token list. - // - // This represents a rustc bug: it did not - // release all of its thread tokens but finished - // completely. But we want to make Cargo resilient - // to such rustc bugs, as they're generally not - // fatal in nature (i.e., Cargo can make progress - // still, and the build might not even fail). - self.tokens.extend(rustc_tokens); - } - self.to_send_clients.remove(&id); - self.report_warning_count(cx.bcx.config, id); - self.active.remove(&id).unwrap() - } - // ... otherwise if it hasn't finished we leave it - // in there as we'll get another `Finish` later on. - Artifact::Metadata => { - info!("end (meta): {:?}", id); - self.active[&id].clone() - } - }; - info!("end ({:?}): {:?}", unit, result); - match result { - Ok(()) => self.finish(id, &unit, artifact, cx)?, - Err(e) => { - let msg = "The following warnings were emitted during compilation:"; - self.emit_warnings(Some(msg), &unit, cx)?; - self.back_compat_notice(cx, &unit)?; - return Err(e); - } - } - } - Message::FutureIncompatReport(id, items) => { - let package_id = self.active[&id].pkg.package_id(); - self.per_package_future_incompat_reports - .push(FutureIncompatReportPackage { package_id, items }); - } - Message::Token(acquired_token) => { - let token = acquired_token.with_context(|| "failed to acquire jobserver token")?; - self.tokens.push(token); - } - Message::NeedsToken(id) => { - log::info!("queue token request"); - jobserver_helper.request_token(); - let client = cx.rustc_clients[&self.active[&id]].clone(); - self.to_send_clients - .entry(id) - .or_insert_with(Vec::new) - .push(client); - } - Message::ReleaseToken(id) => { - // Note that this pops off potentially a completely - // different token, but all tokens of the same job are - // conceptually the same so that's fine. - // - // self.tokens is a "pool" -- the order doesn't matter -- and - // this transfers ownership of the token into that pool. If we - // end up using it on the next go around, then this token will - // be truncated, same as tokens obtained through Message::Token. - let rustc_tokens = self - .rustc_tokens - .get_mut(&id) - .expect("no tokens associated"); - self.tokens - .push(rustc_tokens.pop().expect("rustc releases token it has")); - } - } - - Ok(()) - } - - // This will also tick the progress bar as appropriate - fn wait_for_events(&mut self) -> Vec { - // Drain all events at once to avoid displaying the progress bar - // unnecessarily. If there's no events we actually block waiting for - // an event, but we keep a "heartbeat" going to allow `record_cpu` - // to run above to calculate CPU usage over time. To do this we - // listen for a message with a timeout, and on timeout we run the - // previous parts of the loop again. 
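// Editorial sketch (not part of the deleted cargo source): the heartbeat
// pattern described in the comment above, using std's `recv_timeout` in place
// of cargo's bounded `Queue`. Waiting with a timeout lets the loop keep
// ticking (progress bar, CPU sampling) even while no event arrives.
use std::sync::mpsc::{self, Receiver, RecvTimeoutError};
use std::thread;
use std::time::Duration;

fn wait_for_event(rx: &Receiver<String>, tick: &mut dyn FnMut()) -> Option<String> {
    loop {
        tick(); // refresh progress / record CPU usage on every beat
        match rx.recv_timeout(Duration::from_millis(500)) {
            Ok(event) => return Some(event),
            Err(RecvTimeoutError::Timeout) => continue, // no event yet, beat again
            Err(RecvTimeoutError::Disconnected) => return None,
        }
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    thread::spawn(move || {
        thread::sleep(Duration::from_millis(1200));
        tx.send("job 3 finished".to_string()).unwrap();
    });
    let mut beats = 0;
    let event = wait_for_event(&rx, &mut || beats += 1);
    println!("got {:?} after {} heartbeat(s)", event, beats);
}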
- let mut events = self.messages.try_pop_all(); - info!( - "tokens in use: {}, rustc_tokens: {:?}, waiting_rustcs: {:?} (events this tick: {})", - self.tokens.len(), - self.rustc_tokens - .iter() - .map(|(k, j)| (k, j.len())) - .collect::>(), - self.to_send_clients - .iter() - .map(|(k, j)| (k, j.len())) - .collect::>(), - events.len(), - ); - if events.is_empty() { - loop { - self.tick_progress(); - self.tokens.truncate(self.active.len() - 1); - match self.messages.pop(Duration::from_millis(500)) { - Some(message) => { - events.push(message); - break; - } - None => continue, - } - } - } - events - } - - /// This is the "main" loop, where Cargo does all work to run the - /// compiler. - /// - /// This returns an Option to prevent the use of `?` on `Result` types - /// because it is important for the loop to carefully handle errors. - fn drain_the_queue( - mut self, - cx: &mut Context<'_, '_>, - plan: &mut BuildPlan, - scope: &Scope<'_>, - jobserver_helper: &HelperThread, - ) -> Option { - trace!("queue: {:#?}", self.queue); - - // Iteratively execute the entire dependency graph. Each turn of the - // loop starts out by scheduling as much work as possible (up to the - // maximum number of parallel jobs we have tokens for). A local queue - // is maintained separately from the main dependency queue as one - // dequeue may actually dequeue quite a bit of work (e.g., 10 binaries - // in one package). - // - // After a job has finished we update our internal state if it was - // successful and otherwise wait for pending work to finish if it failed - // and then immediately return. - let mut error = None; - // CAUTION! Do not use `?` or break out of the loop early. Every error - // must be handled in such a way that the loop is still allowed to - // drain event messages. - loop { - if error.is_none() { - if let Err(e) = self.spawn_work_if_possible(cx, jobserver_helper, scope) { - self.handle_error(&mut cx.bcx.config.shell(), &mut error, e); - } - } - - // If after all that we're not actually running anything then we're - // done! - if self.active.is_empty() { - break; - } - - if let Err(e) = self.grant_rustc_token_requests() { - self.handle_error(&mut cx.bcx.config.shell(), &mut error, e); - } - - // And finally, before we block waiting for the next event, drop any - // excess tokens we may have accidentally acquired. Due to how our - // jobserver interface is architected we may acquire a token that we - // don't actually use, and if this happens just relinquish it back - // to the jobserver itself. - for event in self.wait_for_events() { - if let Err(event_err) = self.handle_event(cx, jobserver_helper, plan, event) { - self.handle_error(&mut cx.bcx.config.shell(), &mut error, event_err); - } - } - } - self.progress.clear(); - - let profile_name = cx.bcx.build_config.requested_profile; - // NOTE: this may be a bit inaccurate, since this may not display the - // profile for what was actually built. Profile overrides can change - // these settings, and in some cases different targets are built with - // different profiles. To be accurate, it would need to collect a - // list of Units built, and maybe display a list of the different - // profiles used. However, to keep it simple and compatible with old - // behavior, we just display what the base profile is. 
- let profile = cx.bcx.profiles.base_profile(); - let mut opt_type = String::from(if profile.opt_level.as_str() == "0" { - "unoptimized" - } else { - "optimized" - }); - if profile.debuginfo.unwrap_or(0) != 0 { - opt_type += " + debuginfo"; - } - - let time_elapsed = util::elapsed(cx.bcx.config.creation_time().elapsed()); - if let Err(e) = self.timings.finished(cx.bcx, &error) { - if error.is_some() { - crate::display_error(&e, &mut cx.bcx.config.shell()); - } else { - return Some(e); - } - } - if cx.bcx.build_config.emit_json() { - let mut shell = cx.bcx.config.shell(); - let msg = machine_message::BuildFinished { - success: error.is_none(), - } - .to_json_string(); - if let Err(e) = writeln!(shell.out(), "{}", msg) { - if error.is_some() { - crate::display_error(&e.into(), &mut shell); - } else { - return Some(e.into()); - } - } - } - - if let Some(e) = error { - Some(e) - } else if self.queue.is_empty() && self.pending_queue.is_empty() { - let message = format!( - "{} [{}] target(s) in {}", - profile_name, opt_type, time_elapsed - ); - if !cx.bcx.build_config.build_plan { - // It doesn't really matter if this fails. - drop(cx.bcx.config.shell().status("Finished", message)); - future_incompat::save_and_display_report( - cx.bcx, - &self.per_package_future_incompat_reports, - ); - } - - None - } else { - debug!("queue: {:#?}", self.queue); - Some(internal("finished with jobs still left in the queue")) - } - } - - fn handle_error( - &self, - shell: &mut Shell, - err_state: &mut Option, - new_err: anyhow::Error, - ) { - if err_state.is_some() { - // Already encountered one error. - log::warn!("{:?}", new_err); - } else if !self.active.is_empty() { - crate::display_error(&new_err, shell); - drop(shell.warn("build failed, waiting for other jobs to finish...")); - *err_state = Some(anyhow::format_err!("build failed")); - } else { - *err_state = Some(new_err); - } - } - - // This also records CPU usage and marks concurrency; we roughly want to do - // this as often as we spin on the events receiver (at least every 500ms or - // so). - fn tick_progress(&mut self) { - // Record some timing information if `-Ztimings` is enabled, and - // this'll end up being a noop if we're not recording this - // information. - self.timings.mark_concurrency( - self.active.len(), - self.pending_queue.len(), - self.queue.len(), - self.rustc_tokens.len(), - ); - self.timings.record_cpu(); - - let active_names = self - .active - .values() - .map(|u| self.name_for_progress(u)) - .collect::>(); - drop(self.progress.tick_now( - self.finished, - self.total_units, - &format!(": {}", active_names.join(", ")), - )); - } - - fn name_for_progress(&self, unit: &Unit) -> String { - let pkg_name = unit.pkg.name(); - let target_name = unit.target.name(); - match unit.mode { - CompileMode::Doc { .. 
} => format!("{}(doc)", pkg_name), - CompileMode::RunCustomBuild => format!("{}(build)", pkg_name), - CompileMode::Test | CompileMode::Check { test: true } => match unit.target.kind() { - TargetKind::Lib(_) => format!("{}(test)", target_name), - TargetKind::CustomBuild => panic!("cannot test build script"), - TargetKind::Bin => format!("{}(bin test)", target_name), - TargetKind::Test => format!("{}(test)", target_name), - TargetKind::Bench => format!("{}(bench)", target_name), - TargetKind::ExampleBin | TargetKind::ExampleLib(_) => { - format!("{}(example test)", target_name) - } - }, - _ => match unit.target.kind() { - TargetKind::Lib(_) => pkg_name.to_string(), - TargetKind::CustomBuild => format!("{}(build.rs)", pkg_name), - TargetKind::Bin => format!("{}(bin)", target_name), - TargetKind::Test => format!("{}(test)", target_name), - TargetKind::Bench => format!("{}(bench)", target_name), - TargetKind::ExampleBin | TargetKind::ExampleLib(_) => { - format!("{}(example)", target_name) - } - }, - } - } - - /// Executes a job. - /// - /// Fresh jobs block until finished (which should be very fast!), Dirty - /// jobs will spawn a thread in the background and return immediately. - fn run(&mut self, unit: &Unit, job: Job, cx: &Context<'_, '_>, scope: &Scope<'_>) { - let id = JobId(self.next_id); - self.next_id = self.next_id.checked_add(1).unwrap(); - - info!("start {}: {:?}", id, unit); - - assert!(self.active.insert(id, unit.clone()).is_none()); - - let messages = self.messages.clone(); - let fresh = job.freshness(); - let rmeta_required = cx.rmeta_required(unit); - - let doit = move |state: JobState<'_, '_>| { - let mut sender = FinishOnDrop { - messages: &state.messages, - id, - result: None, - }; - sender.result = Some(job.run(&state)); - - // If the `rmeta_required` wasn't consumed but it was set - // previously, then we either have: - // - // 1. The `job` didn't do anything because it was "fresh". - // 2. The `job` returned an error and didn't reach the point where - // it called `rmeta_produced`. - // 3. We forgot to call `rmeta_produced` and there's a bug in Cargo. - // - // Ruling out the third, the other two are pretty common for 2 - // we'll just naturally abort the compilation operation but for 1 - // we need to make sure that the metadata is flagged as produced so - // send a synthetic message here. - if state.rmeta_required.get() && sender.result.as_ref().unwrap().is_ok() { - state - .messages - .push(Message::Finish(state.id, Artifact::Metadata, Ok(()))); - } - - // Use a helper struct with a `Drop` implementation to guarantee - // that a `Finish` message is sent even if our job panics. We - // shouldn't panic unless there's a bug in Cargo, so we just need - // to make sure nothing hangs by accident. - struct FinishOnDrop<'a> { - messages: &'a Queue, - id: JobId, - result: Option>, - } - - impl Drop for FinishOnDrop<'_> { - fn drop(&mut self) { - let result = self - .result - .take() - .unwrap_or_else(|| Err(format_err!("worker panicked"))); - self.messages - .push(Message::Finish(self.id, Artifact::All, result)); - } - } - }; - - match fresh { - Freshness::Fresh => { - self.timings.add_fresh(); - // Running a fresh job on the same thread is often much faster than spawning a new - // thread to run the job. 
- doit(JobState { - id, - messages, - output: Some(&self.diag_dedupe), - rmeta_required: Cell::new(rmeta_required), - _marker: marker::PhantomData, - }); - } - Freshness::Dirty => { - self.timings.add_dirty(); - scope.spawn(move |_| { - doit(JobState { - id, - messages: messages.clone(), - output: None, - rmeta_required: Cell::new(rmeta_required), - _marker: marker::PhantomData, - }) - }); - } - } - } - - fn emit_warnings( - &mut self, - msg: Option<&str>, - unit: &Unit, - cx: &mut Context<'_, '_>, - ) -> CargoResult<()> { - let outputs = cx.build_script_outputs.lock().unwrap(); - let metadata = match cx.find_build_script_metadata(unit) { - Some(metadata) => metadata, - None => return Ok(()), - }; - let bcx = &mut cx.bcx; - if let Some(output) = outputs.get(metadata) { - if !output.warnings.is_empty() { - if let Some(msg) = msg { - writeln!(bcx.config.shell().err(), "{}\n", msg)?; - } - - for warning in output.warnings.iter() { - bcx.config.shell().warn(warning)?; - } - - if msg.is_some() { - // Output an empty line. - writeln!(bcx.config.shell().err())?; - } - } - } - - Ok(()) - } - - fn bump_warning_count(&mut self, id: JobId, emitted: bool) { - let cnts = self.warning_count.entry(id).or_default(); - cnts.0 += 1; - if !emitted { - cnts.1 += 1; - } - } - - /// Displays a final report of the warnings emitted by a particular job. - fn report_warning_count(&mut self, config: &Config, id: JobId) { - let count = match self.warning_count.remove(&id) { - Some(count) => count, - None => return, - }; - let unit = &self.active[&id]; - let mut message = format!("`{}` ({}", unit.pkg.name(), unit.target.description_named()); - if unit.mode.is_rustc_test() && !(unit.target.is_test() || unit.target.is_bench()) { - message.push_str(" test"); - } else if unit.mode.is_doc_test() { - message.push_str(" doctest"); - } else if unit.mode.is_doc() { - message.push_str(" doc"); - } - message.push_str(") generated "); - match count.0 { - 1 => message.push_str("1 warning"), - n => drop(write!(message, "{} warnings", n)), - }; - match count.1 { - 0 => {} - 1 => message.push_str(" (1 duplicate)"), - n => drop(write!(message, " ({} duplicates)", n)), - } - // Errors are ignored here because it is tricky to handle them - // correctly, and they aren't important. - drop(config.shell().warn(message)); - } - - fn finish( - &mut self, - id: JobId, - unit: &Unit, - artifact: Artifact, - cx: &mut Context<'_, '_>, - ) -> CargoResult<()> { - if unit.mode.is_run_custom_build() && unit.show_warnings(cx.bcx.config) { - self.emit_warnings(None, unit, cx)?; - } - let unlocked = self.queue.finish(unit, &artifact); - match artifact { - Artifact::All => self.timings.unit_finished(id, unlocked), - Artifact::Metadata => self.timings.unit_rmeta_finished(id, unlocked), - } - Ok(()) - } - - // This isn't super trivial because we don't want to print loads and - // loads of information to the console, but we also want to produce a - // faithful representation of what's happening. This is somewhat nuanced - // as a package can start compiling *very* early on because of custom - // build commands and such. - // - // In general, we try to print "Compiling" for the first nontrivial task - // run for a package, regardless of when that is. We then don't print - // out any more information for a package after we've printed it once. 
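`run` above executes `Fresh` jobs inline and spawns a scoped thread only for `Dirty` ones. The sketch below shows that dispatch with `crossbeam-utils` scoped threads, which is what the `Scope<'_>` parameter in the deleted code suggests; the `Freshness` enum and `schedule` function here are simplified stand-ins, not Cargo's.

```rust
use crossbeam_utils::thread;

enum Freshness {
    Fresh,
    Dirty,
}

fn schedule(jobs: Vec<(Freshness, Box<dyn FnOnce() + Send>)>) {
    // Scoped threads are all joined before `scope` returns, so every job has
    // finished by the time `schedule` hands control back to the caller.
    thread::scope(|s| {
        for (freshness, job) in jobs {
            match freshness {
                // Fresh work is trivial; running it inline avoids paying for a
                // thread spawn just to discover there is nothing to do.
                Freshness::Fresh => job(),
                // Dirty work actually invokes the compiler, so push it onto a
                // background thread and keep scheduling.
                Freshness::Dirty => {
                    s.spawn(move |_| job());
                }
            }
        }
    })
    .unwrap();
}

fn main() {
    let mut jobs: Vec<(Freshness, Box<dyn FnOnce() + Send>)> = Vec::new();
    jobs.push((Freshness::Fresh, Box::new(|| println!("fresh: reuse cached output"))));
    jobs.push((Freshness::Dirty, Box::new(|| println!("dirty: invoke the compiler"))));
    schedule(jobs);
}
```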
- fn note_working_on( - &mut self, - config: &Config, - unit: &Unit, - fresh: Freshness, - ) -> CargoResult<()> { - if (self.compiled.contains(&unit.pkg.package_id()) && !unit.mode.is_doc()) - || (self.documented.contains(&unit.pkg.package_id()) && unit.mode.is_doc()) - { - return Ok(()); - } - - match fresh { - // Any dirty stage which runs at least one command gets printed as - // being a compiled package. - Dirty => { - if unit.mode.is_doc() { - self.documented.insert(unit.pkg.package_id()); - config.shell().status("Documenting", &unit.pkg)?; - } else if unit.mode.is_doc_test() { - // Skip doc test. - } else { - self.compiled.insert(unit.pkg.package_id()); - if unit.mode.is_check() { - config.shell().status("Checking", &unit.pkg)?; - } else { - config.shell().status("Compiling", &unit.pkg)?; - } - } - } - Fresh => { - // If doc test are last, only print "Fresh" if nothing has been printed. - if self.counts[&unit.pkg.package_id()] == 0 - && !(unit.mode.is_doc_test() && self.compiled.contains(&unit.pkg.package_id())) - { - self.compiled.insert(unit.pkg.package_id()); - config.shell().verbose(|c| c.status("Fresh", &unit.pkg))?; - } - } - } - Ok(()) - } - - fn back_compat_notice(&self, cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<()> { - if unit.pkg.name() != "diesel" - || unit.pkg.version() >= &Version::new(1, 4, 8) - || cx.bcx.ws.resolve_behavior() == ResolveBehavior::V1 - || !unit.pkg.package_id().source_id().is_registry() - || !unit.features.is_empty() - { - return Ok(()); - } - if !cx - .bcx - .unit_graph - .keys() - .any(|unit| unit.pkg.name() == "diesel" && !unit.features.is_empty()) - { - return Ok(()); - } - cx.bcx.config.shell().note( - "\ -This error may be due to an interaction between diesel and Cargo's new -feature resolver. Try updating to diesel 1.4.8 to fix this error. -", - )?; - Ok(()) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/layout.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/layout.rs deleted file mode 100644 index e3dd6eaf1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/layout.rs +++ /dev/null @@ -1,230 +0,0 @@ -//! Management of the directory layout of a build -//! -//! The directory layout is a little tricky at times, hence a separate file to -//! house this logic. The current layout looks like this: -//! -//! ```text -//! # This is the root directory for all output, the top-level package -//! # places all of its output here. -//! target/ -//! -//! # Cache of `rustc -Vv` output for performance. -//! .rustc-info.json -//! -//! # All final artifacts are linked into this directory from `deps`. -//! # Note that named profiles will soon be included as separate directories -//! # here. They have a restricted format, similar to Rust identifiers, so -//! # Cargo-specific directories added in the future should use some prefix -//! # like `.` to avoid name collisions. -//! debug/ # or release/ -//! -//! # File used to lock the directory to prevent multiple cargo processes -//! # from using it at the same time. -//! .cargo-lock -//! -//! # Hidden directory that holds all of the fingerprint files for all -//! # packages -//! .fingerprint/ -//! # Each package is in a separate directory. -//! # Note that different target kinds have different filename prefixes. -//! $pkgname-$META/ -//! # Set of source filenames for this package. -//! dep-lib-$targetname -//! # Timestamp when this package was last built. -//! invoked.timestamp -//! # The fingerprint hash. -//! 
lib-$targetname -//! # Detailed information used for logging the reason why -//! # something is being recompiled. -//! lib-$targetname.json -//! # The console output from the compiler. This is cached -//! # so that warnings can be redisplayed for "fresh" units. -//! output-lib-$targetname -//! -//! # This is the root directory for all rustc artifacts except build -//! # scripts, examples, and test and bench executables. Almost every -//! # artifact should have a metadata hash added to its filename to -//! # prevent collisions. One notable exception is dynamic libraries. -//! deps/ -//! -//! # Root directory for all compiled examples. -//! examples/ -//! -//! # Directory used to store incremental data for the compiler (when -//! # incremental is enabled. -//! incremental/ -//! -//! # This is the location at which the output of all custom build -//! # commands are rooted. -//! build/ -//! -//! # Each package gets its own directory where its build script and -//! # script output are placed -//! $pkgname-$META/ # For the build script itself. -//! # The build script executable (name may be changed by user). -//! build-script-build-$META -//! # Hard link to build-script-build-$META. -//! build-script-build -//! # Dependency information generated by rustc. -//! build-script-build-$META.d -//! # Debug information, depending on platform and profile -//! # settings. -//! -//! -//! # The package shows up twice with two different metadata hashes. -//! $pkgname-$META/ # For the output of the build script. -//! # Timestamp when the build script was last executed. -//! invoked.timestamp -//! # Directory where script can output files ($OUT_DIR). -//! out/ -//! # Output from the build script. -//! output -//! # Path to `out`, used to help when the target directory is -//! # moved. -//! root-output -//! # Stderr output from the build script. -//! stderr -//! -//! # Output from rustdoc -//! doc/ -//! -//! # Used by `cargo package` and `cargo publish` to build a `.crate` file. -//! package/ -//! -//! # Experimental feature for generated build scripts. -//! .metabuild/ -//! ``` -//! -//! When cross-compiling, the layout is the same, except it appears in -//! `target/$TRIPLE`. - -use crate::core::compiler::CompileTarget; -use crate::core::Workspace; -use crate::util::{CargoResult, FileLock}; -use cargo_util::paths; -use std::path::{Path, PathBuf}; - -/// Contains the paths of all target output locations. -/// -/// See module docs for more information. -pub struct Layout { - /// The root directory: `/path/to/target`. - /// If cross compiling: `/path/to/target/$TRIPLE`. - root: PathBuf, - /// The final artifact destination: `$root/debug` (or `release`). - dest: PathBuf, - /// The directory with rustc artifacts: `$dest/deps` - deps: PathBuf, - /// The directory for build scripts: `$dest/build` - build: PathBuf, - /// The directory for incremental files: `$dest/incremental` - incremental: PathBuf, - /// The directory for fingerprints: `$dest/.fingerprint` - fingerprint: PathBuf, - /// The directory for examples: `$dest/examples` - examples: PathBuf, - /// The directory for rustdoc output: `$root/doc` - doc: PathBuf, - /// The directory for temporary data of integration tests and benches: `$dest/tmp` - tmp: PathBuf, - /// The lockfile for a build (`.cargo-lock`). Will be unlocked when this - /// struct is `drop`ped. - _lock: FileLock, -} - -impl Layout { - /// Calculate the paths for build output, lock the build directory, and return as a Layout. 
- /// - /// This function will block if the directory is already locked. - /// - /// `dest` should be the final artifact directory name. Currently either - /// "debug" or "release". - pub fn new( - ws: &Workspace<'_>, - target: Option, - dest: &str, - ) -> CargoResult { - let mut root = ws.target_dir(); - if let Some(target) = target { - root.push(target.short_name()); - } - let dest = root.join(dest); - // If the root directory doesn't already exist go ahead and create it - // here. Use this opportunity to exclude it from backups as well if the - // system supports it since this is a freshly created folder. - // - paths::create_dir_all_excluded_from_backups_atomic(root.as_path_unlocked())?; - // Now that the excluded from backups target root is created we can create the - // actual destination (sub)subdirectory. - paths::create_dir_all(dest.as_path_unlocked())?; - - // For now we don't do any more finer-grained locking on the artifact - // directory, so just lock the entire thing for the duration of this - // compile. - let lock = dest.open_rw(".cargo-lock", ws.config(), "build directory")?; - let root = root.into_path_unlocked(); - let dest = dest.into_path_unlocked(); - - Ok(Layout { - deps: dest.join("deps"), - build: dest.join("build"), - incremental: dest.join("incremental"), - fingerprint: dest.join(".fingerprint"), - examples: dest.join("examples"), - doc: root.join("doc"), - tmp: root.join("tmp"), - root, - dest, - _lock: lock, - }) - } - - /// Makes sure all directories stored in the Layout exist on the filesystem. - pub fn prepare(&mut self) -> CargoResult<()> { - paths::create_dir_all(&self.deps)?; - paths::create_dir_all(&self.incremental)?; - paths::create_dir_all(&self.fingerprint)?; - paths::create_dir_all(&self.examples)?; - paths::create_dir_all(&self.build)?; - - Ok(()) - } - - /// Fetch the destination path for final artifacts (`/โ€ฆ/target/debug`). - pub fn dest(&self) -> &Path { - &self.dest - } - /// Fetch the deps path. - pub fn deps(&self) -> &Path { - &self.deps - } - /// Fetch the examples path. - pub fn examples(&self) -> &Path { - &self.examples - } - /// Fetch the doc path. - pub fn doc(&self) -> &Path { - &self.doc - } - /// Fetch the root path (`/โ€ฆ/target`). - pub fn root(&self) -> &Path { - &self.root - } - /// Fetch the incremental path. - pub fn incremental(&self) -> &Path { - &self.incremental - } - /// Fetch the fingerprint path. - pub fn fingerprint(&self) -> &Path { - &self.fingerprint - } - /// Fetch the build script path. - pub fn build(&self) -> &Path { - &self.build - } - /// Create and return the tmp path. - pub fn prepare_tmp(&self) -> CargoResult<&Path> { - paths::create_dir_all(&self.tmp)?; - Ok(&self.tmp) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/links.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/links.rs deleted file mode 100644 index 34c021f93..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/links.rs +++ /dev/null @@ -1,57 +0,0 @@ -use super::unit_graph::UnitGraph; -use crate::core::resolver::errors::describe_path; -use crate::core::{PackageId, Resolve}; -use crate::util::errors::CargoResult; -use std::collections::{HashMap, HashSet}; - -/// Validate `links` field does not conflict between packages. -pub fn validate_links(resolve: &Resolve, unit_graph: &UnitGraph) -> CargoResult<()> { - // NOTE: This is the *old* links validator. Links are usually validated in - // the resolver. 
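`Layout::new` and `prepare` above derive every output directory from `target[/$TRIPLE]/<profile>` and create the tree up front. A cut-down, std-only sketch of that derivation, leaving out the `.cargo-lock` locking and the backup-exclusion step:

```rust
use std::fs;
use std::io;
use std::path::{Path, PathBuf};

/// A cut-down version of the layout described above: everything hangs off
/// `target[/$TRIPLE]/<profile>`, except `doc/` (and the lazily created `tmp/`),
/// which live at the root.
struct MiniLayout {
    root: PathBuf,
    dest: PathBuf,
}

impl MiniLayout {
    fn new(target_dir: &Path, triple: Option<&str>, profile_dir: &str) -> MiniLayout {
        let mut root = target_dir.to_path_buf();
        if let Some(triple) = triple {
            // Cross-compiled output is namespaced by the target triple.
            root.push(triple);
        }
        let dest = root.join(profile_dir); // e.g. "debug" or "release"
        MiniLayout { root, dest }
    }

    /// Create the subdirectories rustc and build scripts will write into.
    fn prepare(&self) -> io::Result<()> {
        for dir in [
            self.dest.join("deps"),
            self.dest.join("build"),
            self.dest.join("incremental"),
            self.dest.join(".fingerprint"),
            self.dest.join("examples"),
            self.root.join("doc"),
        ] {
            fs::create_dir_all(dir)?;
        }
        Ok(())
    }
}

fn main() -> io::Result<()> {
    MiniLayout::new(Path::new("target"), None, "debug").prepare()
}
```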
However, the `links` field was added to the index in - // early 2018 (see https://github.com/rust-lang/cargo/pull/4978). However, - // `links` has been around since 2014, so there are still many crates in - // the index that don't have `links` properly set in the index (over 600 - // at the time of this writing in 2019). This can probably be removed at - // some point in the future, though it might be worth considering fixing - // the index. - let mut validated: HashSet = HashSet::new(); - let mut links: HashMap = HashMap::new(); - let mut units: Vec<_> = unit_graph.keys().collect(); - // Sort primarily to make testing easier. - units.sort_unstable(); - for unit in units { - if !validated.insert(unit.pkg.package_id()) { - continue; - } - let lib = match unit.pkg.manifest().links() { - Some(lib) => lib, - None => continue, - }; - if let Some(&prev) = links.get(lib) { - let prev_path = resolve - .path_to_top(&prev) - .into_iter() - .map(|(p, d)| (p, d.and_then(|d| d.iter().next()))); - let pkg = unit.pkg.package_id(); - let path = resolve - .path_to_top(&pkg) - .into_iter() - .map(|(p, d)| (p, d.and_then(|d| d.iter().next()))); - anyhow::bail!( - "multiple packages link to native library `{}`, \ - but a native library can be linked only once\n\ - \n\ - {}\nlinks to native library `{}`\n\ - \n\ - {}\nalso links to native library `{}`", - lib, - describe_path(prev_path), - lib, - describe_path(path), - lib - ) - } - links.insert(lib.to_string(), unit.pkg.package_id()); - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/lto.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/lto.rs deleted file mode 100644 index 661d1d266..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/lto.rs +++ /dev/null @@ -1,192 +0,0 @@ -use crate::core::compiler::{BuildContext, CompileMode, CrateType, Unit}; -use crate::core::profiles; -use crate::util::interning::InternedString; - -use crate::util::errors::CargoResult; -use std::collections::hash_map::{Entry, HashMap}; - -/// Possible ways to run rustc and request various parts of LTO. -/// -/// Variant | Flag | Object Code | Bitcode -/// -------------------|------------------------|-------------|-------- -/// `Run` | `-C lto=foo` | n/a | n/a -/// `Off` | `-C lto=off` | n/a | n/a -/// `OnlyBitcode` | `-C linker-plugin-lto` | | โœ“ -/// `ObjectAndBitcode` | | โœ“ | โœ“ -/// `OnlyObject` | `-C embed-bitcode=no` | โœ“ | -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub enum Lto { - /// LTO is run for this rustc, and it's `-Clto=foo`. If the given value is - /// None, that corresponds to `-Clto` with no argument, which means do - /// "fat" LTO. - Run(Option), - - /// LTO has been explicitly listed as "off". This means no thin-local-LTO, - /// no LTO anywhere, I really mean it! - Off, - - /// This rustc invocation only needs to produce bitcode (it is *only* used - /// for LTO), there's no need to produce object files, so we can pass - /// `-Clinker-plugin-lto` - OnlyBitcode, - - /// This rustc invocation needs to embed bitcode in object files. This means - /// that object files may be used for a normal link, and the crate may be - /// loaded for LTO later, so both are required. - ObjectAndBitcode, - - /// This should not include bitcode. This is primarily to reduce disk - /// space usage. 
- OnlyObject, -} - -pub fn generate(bcx: &BuildContext<'_, '_>) -> CargoResult> { - let mut map = HashMap::new(); - for unit in bcx.roots.iter() { - let root_lto = match unit.profile.lto { - // LTO not requested, no need for bitcode. - profiles::Lto::Bool(false) => Lto::OnlyObject, - profiles::Lto::Off => Lto::Off, - _ => { - let crate_types = unit.target.rustc_crate_types(); - if unit.target.for_host() { - Lto::OnlyObject - } else if needs_object(&crate_types) { - lto_when_needs_object(&crate_types) - } else { - // This may or may not participate in LTO, let's start - // with the minimum requirements. This may be expanded in - // `calculate` below if necessary. - Lto::OnlyBitcode - } - } - }; - calculate(bcx, &mut map, unit, root_lto)?; - } - Ok(map) -} - -/// Whether or not any of these crate types need object code. -fn needs_object(crate_types: &[CrateType]) -> bool { - crate_types.iter().any(|k| k.can_lto() || k.is_dynamic()) -} - -/// Lto setting to use when this unit needs object code. -fn lto_when_needs_object(crate_types: &[CrateType]) -> Lto { - if crate_types.iter().all(|ct| *ct == CrateType::Dylib) { - // A dylib whose parent is running LTO. rustc currently - // doesn't support LTO with dylibs, so bitcode is not - // needed. - Lto::OnlyObject - } else { - // Mixed rlib with a dylib or cdylib whose parent is running LTO. This - // needs both: bitcode for the rlib (for LTO) and object code for the - // dylib. - Lto::ObjectAndBitcode - } -} - -fn calculate( - bcx: &BuildContext<'_, '_>, - map: &mut HashMap, - unit: &Unit, - parent_lto: Lto, -) -> CargoResult<()> { - let crate_types = match unit.mode { - // Note: Doctest ignores LTO, but for now we'll compute it as-if it is - // a Bin, in case it is ever supported in the future. - CompileMode::Test | CompileMode::Bench | CompileMode::Doctest => vec![CrateType::Bin], - // Notes on other modes: - // - Check: Treat as the underlying type, it doesn't really matter. - // - Doc: LTO is N/A for the Doc unit itself since rustdoc does not - // support codegen flags. We still compute the dependencies, which - // are mostly `Check`. - // - RunCustomBuild is ignored because it is always "for_host". - _ => unit.target.rustc_crate_types(), - }; - // LTO can only be performed if *all* of the crate types support it. - // For example, a cdylib/rlib combination won't allow LTO. - let all_lto_types = crate_types.iter().all(CrateType::can_lto); - // Compute the LTO based on the profile, and what our parent requires. - let lto = if unit.target.for_host() { - // Disable LTO for host builds since we only really want to perform LTO - // for the final binary, and LTO on plugins/build scripts/proc macros is - // largely not desired. - Lto::OnlyObject - } else if all_lto_types { - // Note that this ignores the `parent_lto` because this isn't a - // linkable crate type; this unit is not being embedded in the parent. - match unit.profile.lto { - profiles::Lto::Named(s) => Lto::Run(Some(s)), - profiles::Lto::Off => Lto::Off, - profiles::Lto::Bool(true) => Lto::Run(None), - profiles::Lto::Bool(false) => Lto::OnlyObject, - } - } else { - match (parent_lto, needs_object(&crate_types)) { - // An rlib whose parent is running LTO, we only need bitcode. - (Lto::Run(_), false) => Lto::OnlyBitcode, - // LTO when something needs object code. - (Lto::Run(_), true) | (Lto::OnlyBitcode, true) => lto_when_needs_object(&crate_types), - // LTO is disabled, continue to disable it. 
- (Lto::Off, _) => Lto::Off, - // If this doesn't have any requirements, or the requirements are - // already satisfied, then stay with our parent. - (_, false) | (Lto::OnlyObject, true) | (Lto::ObjectAndBitcode, true) => parent_lto, - } - }; - - // Merge the computed LTO. If this unit appears multiple times in the - // graph, the merge may expand the requirements. - let merged_lto = match map.entry(unit.clone()) { - // If we haven't seen this unit before then insert our value and keep - // going. - Entry::Vacant(v) => *v.insert(lto), - - Entry::Occupied(mut v) => { - let result = match (lto, v.get()) { - // No change in requirements. - (Lto::OnlyBitcode, Lto::OnlyBitcode) => Lto::OnlyBitcode, - (Lto::OnlyObject, Lto::OnlyObject) => Lto::OnlyObject, - - // Once we're running LTO we keep running LTO. We should always - // calculate the same thing here each iteration because if we - // see this twice then it means, for example, two unit tests - // depend on a binary, which is normal. - (Lto::Run(s), _) | (_, &Lto::Run(s)) => Lto::Run(s), - - // Off means off! This has the same reasoning as `Lto::Run`. - (Lto::Off, _) | (_, Lto::Off) => Lto::Off, - - // Once a target has requested both, that's the maximal amount - // of work that can be done, so we just keep doing that work. - (Lto::ObjectAndBitcode, _) | (_, Lto::ObjectAndBitcode) => Lto::ObjectAndBitcode, - - // Upgrade so that both requirements can be met. - // - // This is where the trickiness happens. This unit needs - // bitcode and the previously calculated value for this unit - // says it didn't need bitcode (or vice versa). This means that - // we're a shared dependency between some targets which require - // LTO and some which don't. This means that instead of being - // either only-objects or only-bitcode we have to embed both in - // rlibs (used for different compilations), so we switch to - // including both. - (Lto::OnlyObject, Lto::OnlyBitcode) | (Lto::OnlyBitcode, Lto::OnlyObject) => { - Lto::ObjectAndBitcode - } - }; - // No need to recurse if we calculated the same value as before. 
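The `Entry::Occupied` arm above merges the LTO requirement computed for this occurrence of a unit with whatever an earlier occurrence recorded; the rules behave like a small lattice in which `Run` and `Off` are absorbing and `ObjectAndBitcode` caps the object/bitcode pairs. A standalone sketch of that merge over a simplified enum (the real `Run` variant also carries the named-LTO string):

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Lto {
    Run,              // `-C lto[=...]` (the real variant also carries the name)
    Off,              // `-C lto=off`
    OnlyBitcode,      // `-C linker-plugin-lto`
    ObjectAndBitcode, // rustc's default: both outputs
    OnlyObject,       // `-C embed-bitcode=no`
}

/// Combine the requirement computed for one occurrence of a unit with what an
/// earlier occurrence recorded; the result never demands less work than either.
fn merge(new: Lto, prev: Lto) -> Lto {
    use Lto::*;
    match (new, prev) {
        // Running LTO, or explicitly switching it off, wins outright.
        (Run, _) | (_, Run) => Run,
        (Off, _) | (_, Off) => Off,
        // Once both object code and bitcode are needed, that is the maximum.
        (ObjectAndBitcode, _) | (_, ObjectAndBitcode) => ObjectAndBitcode,
        // A shared dependency wanted by an object-only consumer *and* a
        // bitcode-only consumer has to embed both.
        (OnlyObject, OnlyBitcode) | (OnlyBitcode, OnlyObject) => ObjectAndBitcode,
        // Otherwise the requirements already agree.
        (OnlyObject, OnlyObject) => OnlyObject,
        (OnlyBitcode, OnlyBitcode) => OnlyBitcode,
    }
}

fn main() {
    assert_eq!(merge(Lto::OnlyBitcode, Lto::OnlyObject), Lto::ObjectAndBitcode);
    assert_eq!(merge(Lto::Off, Lto::ObjectAndBitcode), Lto::Off);
}
```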
- if result == *v.get() { - return Ok(()); - } - v.insert(result); - result - } - }; - - for dep in &bcx.unit_graph[unit] { - calculate(bcx, map, &dep.unit, merged_lto)?; - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/mod.rs deleted file mode 100644 index b4cd14421..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/mod.rs +++ /dev/null @@ -1,1558 +0,0 @@ -mod build_config; -mod build_context; -mod build_plan; -mod compilation; -mod compile_kind; -mod context; -mod crate_type; -mod custom_build; -mod fingerprint; -pub mod future_incompat; -mod job; -mod job_queue; -mod layout; -mod links; -mod lto; -mod output_depinfo; -pub mod rustdoc; -pub mod standard_lib; -mod timings; -mod unit; -pub mod unit_dependencies; -pub mod unit_graph; - -use std::collections::HashSet; -use std::env; -use std::ffi::{OsStr, OsString}; -use std::fs::{self, File}; -use std::io::{BufRead, Write}; -use std::path::{Path, PathBuf}; -use std::sync::Arc; - -use anyhow::{Context as _, Error}; -use lazycell::LazyCell; -use log::debug; - -pub use self::build_config::{BuildConfig, CompileMode, MessageFormat}; -pub use self::build_context::{ - BuildContext, FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, -}; -use self::build_plan::BuildPlan; -pub use self::compilation::{Compilation, Doctest, UnitOutput}; -pub use self::compile_kind::{CompileKind, CompileTarget}; -pub use self::context::{Context, Metadata}; -pub use self::crate_type::CrateType; -pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts}; -pub use self::job::Freshness; -use self::job::{Job, Work}; -use self::job_queue::{JobQueue, JobState}; -pub(crate) use self::layout::Layout; -pub use self::lto::Lto; -use self::output_depinfo::output_depinfo; -use self::unit_graph::UnitDep; -use crate::core::compiler::future_incompat::FutureIncompatReport; -pub use crate::core::compiler::unit::{Unit, UnitInterner}; -use crate::core::manifest::TargetSourcePath; -use crate::core::profiles::{PanicStrategy, Profile, Strip}; -use crate::core::{Feature, PackageId, Target}; -use crate::util::errors::{CargoResult, VerboseError}; -use crate::util::interning::InternedString; -use crate::util::machine_message::{self, Message}; -use crate::util::{add_path_args, internal, iter_join_onto, profile}; -use cargo_util::{paths, ProcessBuilder, ProcessError}; - -const RUSTDOC_CRATE_VERSION_FLAG: &str = "--crate-version"; - -#[derive(Clone, Hash, Debug, PartialEq, Eq)] -pub enum LinkType { - All, - Cdylib, - Bin, - SingleBin(String), - Test, - Bench, - Example, -} - -impl LinkType { - pub fn applies_to(&self, target: &Target) -> bool { - match self { - LinkType::All => true, - LinkType::Cdylib => target.is_cdylib(), - LinkType::Bin => target.is_bin(), - LinkType::SingleBin(name) => target.is_bin() && target.name() == name, - LinkType::Test => target.is_test(), - LinkType::Bench => target.is_bench(), - LinkType::Example => target.is_exe_example(), - } - } -} - -/// A glorified callback for executing calls to rustc. Rather than calling rustc -/// directly, we'll use an `Executor`, giving clients an opportunity to intercept -/// the build calls. -pub trait Executor: Send + Sync + 'static { - /// Called after a rustc process invocation is prepared up-front for a given - /// unit of work (may still be modified for runtime-known dependencies, when - /// the work is actually executed). 
- fn init(&self, _cx: &Context<'_, '_>, _unit: &Unit) {} - - /// In case of an `Err`, Cargo will not continue with the build process for - /// this package. - fn exec( - &self, - cmd: &ProcessBuilder, - id: PackageId, - target: &Target, - mode: CompileMode, - on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, - on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, - ) -> CargoResult<()>; - - /// Queried when queuing each unit of work. If it returns true, then the - /// unit will always be rebuilt, independent of whether it needs to be. - fn force_rebuild(&self, _unit: &Unit) -> bool { - false - } -} - -/// A `DefaultExecutor` calls rustc without doing anything else. It is Cargo's -/// default behaviour. -#[derive(Copy, Clone)] -pub struct DefaultExecutor; - -impl Executor for DefaultExecutor { - fn exec( - &self, - cmd: &ProcessBuilder, - _id: PackageId, - _target: &Target, - _mode: CompileMode, - on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, - on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, - ) -> CargoResult<()> { - cmd.exec_with_streaming(on_stdout_line, on_stderr_line, false) - .map(drop) - } -} - -fn compile<'cfg>( - cx: &mut Context<'_, 'cfg>, - jobs: &mut JobQueue<'cfg>, - plan: &mut BuildPlan, - unit: &Unit, - exec: &Arc, - force_rebuild: bool, -) -> CargoResult<()> { - let bcx = cx.bcx; - let build_plan = bcx.build_config.build_plan; - if !cx.compiled.insert(unit.clone()) { - return Ok(()); - } - - // Build up the work to be done to compile this unit, enqueuing it once - // we've got everything constructed. - let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name())); - fingerprint::prepare_init(cx, unit)?; - - let job = if unit.mode.is_run_custom_build() { - custom_build::prepare(cx, unit)? - } else if unit.mode.is_doc_test() { - // We run these targets later, so this is just a no-op for now. - Job::new_fresh() - } else if build_plan { - Job::new_dirty(rustc(cx, unit, &exec.clone())?) - } else { - let force = exec.force_rebuild(unit) || force_rebuild; - let mut job = fingerprint::prepare_target(cx, unit, force)?; - job.before(if job.freshness() == Freshness::Dirty { - let work = if unit.mode.is_doc() || unit.mode.is_doc_scrape() { - rustdoc(cx, unit)? - } else { - rustc(cx, unit, exec)? - }; - work.then(link_targets(cx, unit, false)?) - } else { - // We always replay the output cache, - // since it might contain future-incompat-report messages - let work = replay_output_cache( - unit.pkg.package_id(), - PathBuf::from(unit.pkg.manifest_path()), - &unit.target, - cx.files().message_cache_path(unit), - cx.bcx.build_config.message_format, - cx.bcx.config.shell().err_supports_color(), - unit.show_warnings(bcx.config), - ); - // Need to link targets on both the dirty and fresh. - work.then(link_targets(cx, unit, true)?) - }); - - job - }; - jobs.enqueue(cx, unit, job)?; - drop(p); - - // Be sure to compile all dependencies of this target as well. - let deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow. 
- for dep in deps { - compile(cx, jobs, plan, &dep.unit, exec, false)?; - } - if build_plan { - plan.add(cx, unit)?; - } - - Ok(()) -} - -fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc) -> CargoResult { - let mut rustc = prepare_rustc(cx, &unit.target.rustc_crate_types(), unit)?; - let build_plan = cx.bcx.build_config.build_plan; - - let name = unit.pkg.name().to_string(); - let buildkey = unit.buildkey(); - - add_cap_lints(cx.bcx, unit, &mut rustc); - - let outputs = cx.outputs(unit)?; - let root = cx.files().out_dir(unit); - - // Prepare the native lib state (extra `-L` and `-l` flags). - let build_script_outputs = Arc::clone(&cx.build_script_outputs); - let current_id = unit.pkg.package_id(); - let manifest_path = PathBuf::from(unit.pkg.manifest_path()); - let build_scripts = cx.build_scripts.get(unit).cloned(); - - // If we are a binary and the package also contains a library, then we - // don't pass the `-l` flags. - let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); - - let dep_info_name = if cx.files().use_extra_filename(unit) { - format!( - "{}-{}.d", - unit.target.crate_name(), - cx.files().metadata(unit) - ) - } else { - format!("{}.d", unit.target.crate_name()) - }; - let rustc_dep_info_loc = root.join(dep_info_name); - let dep_info_loc = fingerprint::dep_info_loc(cx, unit); - - rustc.args(cx.bcx.rustflags_args(unit)); - if cx.bcx.config.cli_unstable().binary_dep_depinfo { - rustc.arg("-Z").arg("binary-dep-depinfo"); - } - let mut output_options = OutputOptions::new(cx, unit); - let package_id = unit.pkg.package_id(); - let target = Target::clone(&unit.target); - let mode = unit.mode; - - exec.init(cx, unit); - let exec = exec.clone(); - - let root_output = cx.files().host_dest().to_path_buf(); - let target_dir = cx.bcx.ws.target_dir().into_path_unlocked(); - let pkg_root = unit.pkg.root().to_path_buf(); - let cwd = rustc - .get_cwd() - .unwrap_or_else(|| cx.bcx.config.cwd()) - .to_path_buf(); - let fingerprint_dir = cx.files().fingerprint_dir(unit); - let script_metadata = cx.find_build_script_metadata(unit); - let is_local = unit.is_local(); - - return Ok(Work::new(move |state| { - // Only at runtime have we discovered what the extra -L and -l - // arguments are for native libraries, so we process those here. We - // also need to be sure to add any -L paths for our plugins to the - // dynamic library load path as a plugin's dynamic library may be - // located somewhere in there. - // Finally, if custom environment variables have been produced by - // previous build scripts, we include them in the rustc invocation. - if let Some(build_scripts) = build_scripts { - let script_outputs = build_script_outputs.lock().unwrap(); - if !build_plan { - add_native_deps( - &mut rustc, - &script_outputs, - &build_scripts, - pass_l_flag, - &target, - current_id, - )?; - add_plugin_deps(&mut rustc, &script_outputs, &build_scripts, &root_output)?; - } - add_custom_env(&mut rustc, &script_outputs, script_metadata); - } - - for output in outputs.iter() { - // If there is both an rmeta and rlib, rustc will prefer to use the - // rlib, even if it is older. Therefore, we must delete the rlib to - // force using the new rmeta. 
- if output.path.extension() == Some(OsStr::new("rmeta")) { - let dst = root.join(&output.path).with_extension("rlib"); - if dst.exists() { - paths::remove_file(&dst)?; - } - } - } - - fn verbose_if_simple_exit_code(err: Error) -> Error { - // If a signal on unix (`code == None`) or an abnormal termination - // on Windows (codes like `0xC0000409`), don't hide the error details. - match err - .downcast_ref::() - .as_ref() - .and_then(|perr| perr.code) - { - Some(n) if cargo_util::is_simple_exit_code(n) => VerboseError::new(err).into(), - _ => err, - } - } - - state.running(&rustc); - let timestamp = paths::set_invocation_time(&fingerprint_dir)?; - if build_plan { - state.build_plan(buildkey, rustc.clone(), outputs.clone()); - } else { - exec.exec( - &rustc, - package_id, - &target, - mode, - &mut |line| on_stdout_line(state, line, package_id, &target), - &mut |line| { - on_stderr_line( - state, - line, - package_id, - &manifest_path, - &target, - &mut output_options, - ) - }, - ) - .map_err(verbose_if_simple_exit_code) - .with_context(|| { - // adapted from rustc_errors/src/lib.rs - let warnings = match output_options.warnings_seen { - 0 => String::new(), - 1 => "; 1 warning emitted".to_string(), - count => format!("; {} warnings emitted", count), - }; - let errors = match output_options.errors_seen { - 0 => String::new(), - 1 => " due to previous error".to_string(), - count => format!(" due to {} previous errors", count), - }; - format!("could not compile `{}`{}{}", name, errors, warnings) - })?; - // Exec should never return with success *and* generate an error. - debug_assert_eq!(output_options.errors_seen, 0); - } - - if rustc_dep_info_loc.exists() { - fingerprint::translate_dep_info( - &rustc_dep_info_loc, - &dep_info_loc, - &cwd, - &pkg_root, - &target_dir, - &rustc, - // Do not track source files in the fingerprint for registry dependencies. - is_local, - ) - .with_context(|| { - internal(format!( - "could not parse/generate dep info at: {}", - rustc_dep_info_loc.display() - )) - })?; - // This mtime shift allows Cargo to detect if a source file was - // modified in the middle of the build. - paths::set_file_time_no_err(dep_info_loc, timestamp); - } - - Ok(()) - })); - - // Add all relevant `-L` and `-l` flags from dependencies (now calculated and - // present in `state`) to the command provided. - fn add_native_deps( - rustc: &mut ProcessBuilder, - build_script_outputs: &BuildScriptOutputs, - build_scripts: &BuildScripts, - pass_l_flag: bool, - target: &Target, - current_id: PackageId, - ) -> CargoResult<()> { - for key in build_scripts.to_link.iter() { - let output = build_script_outputs.get(key.1).ok_or_else(|| { - internal(format!( - "couldn't find build script output for {}/{}", - key.0, key.1 - )) - })?; - for path in output.library_paths.iter() { - rustc.arg("-L").arg(path); - } - - if key.0 == current_id { - for cfg in &output.cfgs { - rustc.arg("--cfg").arg(cfg); - } - if pass_l_flag { - for name in output.library_links.iter() { - rustc.arg("-l").arg(name); - } - } - } - - for (lt, arg) in &output.linker_args { - // There was an unintentional change where cdylibs were - // allowed to be passed via transitive dependencies. This - // clause should have been kept in the `if` block above. For - // now, continue allowing it for cdylib only. 
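`verbose_if_simple_exit_code` above decides whether to hide details based on whether the child exited with an ordinary status code; on Unix, `ExitStatus::code()` is `None` when the process was killed by a signal, which is exactly the abnormal case that stays verbose. A std-only illustration of that distinction, using `rustc --version` purely as a convenient child process:

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // Any child process works here; `rustc --version` is just a convenient one.
    let status = Command::new("rustc").arg("--version").status()?;
    match status.code() {
        // Normal termination: an ordinary, "simple" exit code.
        Some(code) => println!("exited with code {}", code),
        // On Unix this means the child was killed by a signal, the abnormal
        // case the deleted helper keeps verbose.
        None => println!("terminated abnormally (signal)"),
    }
    Ok(())
}
```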
- // See https://github.com/rust-lang/cargo/issues/9562 - if lt.applies_to(target) && (key.0 == current_id || *lt == LinkType::Cdylib) { - rustc.arg("-C").arg(format!("link-arg={}", arg)); - } - } - } - Ok(()) - } - - // Add all custom environment variables present in `state` (after they've - // been put there by one of the `build_scripts`) to the command provided. - fn add_custom_env( - rustc: &mut ProcessBuilder, - build_script_outputs: &BuildScriptOutputs, - metadata: Option, - ) { - if let Some(metadata) = metadata { - if let Some(output) = build_script_outputs.get(metadata) { - for &(ref name, ref value) in output.env.iter() { - rustc.env(name, value); - } - } - } - } -} - -/// Link the compiled target (often of form `foo-{metadata_hash}`) to the -/// final target. This must happen during both "Fresh" and "Compile". -fn link_targets(cx: &mut Context<'_, '_>, unit: &Unit, fresh: bool) -> CargoResult { - let bcx = cx.bcx; - let outputs = cx.outputs(unit)?; - let export_dir = cx.files().export_dir(); - let package_id = unit.pkg.package_id(); - let manifest_path = PathBuf::from(unit.pkg.manifest_path()); - let profile = unit.profile; - let unit_mode = unit.mode; - let features = unit.features.iter().map(|s| s.to_string()).collect(); - let json_messages = bcx.build_config.emit_json(); - let executable = cx.get_executable(unit)?; - let mut target = Target::clone(&unit.target); - if let TargetSourcePath::Metabuild = target.src_path() { - // Give it something to serialize. - let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir()); - target.set_src_path(TargetSourcePath::Path(path)); - } - - Ok(Work::new(move |state| { - // If we're a "root crate", e.g., the target of this compilation, then we - // hard link our outputs out of the `deps` directory into the directory - // above. This means that `cargo build` will produce binaries in - // `target/debug` which one probably expects. - let mut destinations = vec![]; - for output in outputs.iter() { - let src = &output.path; - // This may have been a `cargo rustc` command which changes the - // output, so the source may not actually exist. - if !src.exists() { - continue; - } - let dst = match output.hardlink.as_ref() { - Some(dst) => dst, - None => { - destinations.push(src.clone()); - continue; - } - }; - destinations.push(dst.clone()); - paths::link_or_copy(src, dst)?; - if let Some(ref path) = output.export_path { - let export_dir = export_dir.as_ref().unwrap(); - paths::create_dir_all(export_dir)?; - - paths::link_or_copy(src, path)?; - } - } - - if json_messages { - let art_profile = machine_message::ArtifactProfile { - opt_level: profile.opt_level.as_str(), - debuginfo: profile.debuginfo, - debug_assertions: profile.debug_assertions, - overflow_checks: profile.overflow_checks, - test: unit_mode.is_any_test(), - }; - - let msg = machine_message::Artifact { - package_id, - manifest_path, - target: &target, - profile: art_profile, - features, - filenames: destinations, - executable, - fresh, - } - .to_json_string(); - state.stdout(msg)?; - } - Ok(()) - })) -} - -// For all plugin dependencies, add their -L paths (now calculated and present -// in `build_script_outputs`) to the dynamic library load path for the command -// to execute. 
-fn add_plugin_deps( - rustc: &mut ProcessBuilder, - build_script_outputs: &BuildScriptOutputs, - build_scripts: &BuildScripts, - root_output: &Path, -) -> CargoResult<()> { - let var = paths::dylib_path_envvar(); - let search_path = rustc.get_env(var).unwrap_or_default(); - let mut search_path = env::split_paths(&search_path).collect::>(); - for (pkg_id, metadata) in &build_scripts.plugins { - let output = build_script_outputs - .get(*metadata) - .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", pkg_id)))?; - search_path.append(&mut filter_dynamic_search_path( - output.library_paths.iter(), - root_output, - )); - } - let search_path = paths::join_paths(&search_path, var)?; - rustc.env(var, &search_path); - Ok(()) -} - -// Determine paths to add to the dynamic search path from -L entries -// -// Strip off prefixes like "native=" or "framework=" and filter out directories -// **not** inside our output directory since they are likely spurious and can cause -// clashes with system shared libraries (issue #3366). -fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &Path) -> Vec -where - I: Iterator, -{ - let mut search_path = vec![]; - for dir in paths { - let dir = match dir.to_str() { - Some(s) => { - let mut parts = s.splitn(2, '='); - match (parts.next(), parts.next()) { - (Some("native"), Some(path)) - | (Some("crate"), Some(path)) - | (Some("dependency"), Some(path)) - | (Some("framework"), Some(path)) - | (Some("all"), Some(path)) => path.into(), - _ => dir.clone(), - } - } - None => dir.clone(), - }; - if dir.starts_with(&root_output) { - search_path.push(dir); - } else { - debug!( - "Not including path {} in runtime library search path because it is \ - outside target root {}", - dir.display(), - root_output.display() - ); - } - } - search_path -} - -fn prepare_rustc( - cx: &mut Context<'_, '_>, - crate_types: &[CrateType], - unit: &Unit, -) -> CargoResult { - let is_primary = cx.is_primary_package(unit); - let is_workspace = cx.bcx.ws.is_member(&unit.pkg); - - let mut base = cx - .compilation - .rustc_process(unit, is_primary, is_workspace)?; - - if is_primary { - base.env("CARGO_PRIMARY_PACKAGE", "1"); - } - - if unit.target.is_test() || unit.target.is_bench() { - let tmp = cx.files().layout(unit.kind).prepare_tmp()?; - base.env("CARGO_TARGET_TMPDIR", tmp.display().to_string()); - } - - if cx.bcx.config.cli_unstable().jobserver_per_rustc { - let client = cx.new_jobserver()?; - base.inherit_jobserver(&client); - base.arg("-Z").arg("jobserver-token-requests"); - assert!(cx.rustc_clients.insert(unit.clone(), client).is_none()); - } else { - base.inherit_jobserver(&cx.jobserver); - } - build_base_args(cx, &mut base, unit, crate_types)?; - build_deps_args(&mut base, cx, unit)?; - Ok(base) -} - -fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult { - let bcx = cx.bcx; - // script_metadata is not needed here, it is only for tests. 
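`add_plugin_deps` above grows the platform's dynamic-library search path by splitting the existing value, appending the plugin output directories, and re-joining it. The same `std::env::split_paths`/`join_paths` round trip in isolation, hard-coding `LD_LIBRARY_PATH` and a dummy directory for illustration where the deleted code asks `cargo_util` for the per-platform variable name:

```rust
use std::env;
use std::path::PathBuf;

fn main() -> Result<(), env::JoinPathsError> {
    // Illustrative only; the deleted code picks the right variable per platform.
    let var = "LD_LIBRARY_PATH";
    // Split the current value into its component directories (empty if unset).
    let mut dirs: Vec<PathBuf> =
        env::split_paths(&env::var_os(var).unwrap_or_default()).collect();
    // Append the directory holding a plugin's dynamic libraries.
    dirs.push(PathBuf::from("/tmp/example-plugin-libs"));
    // Re-join with the platform separator (':' on Unix, ';' on Windows); the
    // result would be passed to the child process's environment.
    let joined = env::join_paths(&dirs)?;
    println!("{}={}", var, joined.to_string_lossy());
    Ok(())
}
```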
- let mut rustdoc = cx.compilation.rustdoc_process(unit, None)?; - rustdoc.inherit_jobserver(&cx.jobserver); - let crate_name = unit.target.crate_name(); - rustdoc.arg("--crate-name").arg(&crate_name); - add_path_args(bcx.ws, unit, &mut rustdoc); - add_cap_lints(bcx, unit, &mut rustdoc); - - if let CompileKind::Target(target) = unit.kind { - rustdoc.arg("--target").arg(target.rustc_target()); - } - let doc_dir = cx.files().out_dir(unit); - - // Create the documentation directory ahead of time as rustdoc currently has - // a bug where concurrent invocations will race to create this directory if - // it doesn't already exist. - paths::create_dir_all(&doc_dir)?; - - rustdoc.arg("-o").arg(&doc_dir); - - for feat in &unit.features { - rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); - } - - add_error_format_and_color(cx, &mut rustdoc, unit, false); - add_allow_features(cx, &mut rustdoc); - - if let Some(args) = cx.bcx.extra_args_for(unit) { - rustdoc.args(args); - } - - let metadata = cx.metadata_for_doc_units[unit]; - rustdoc.arg("-C").arg(format!("metadata={}", metadata)); - - let scrape_output_path = |unit: &Unit| -> CargoResult { - let output_dir = cx.files().deps_dir(unit); - Ok(output_dir.join(format!("{}.examples", unit.buildkey()))) - }; - - if unit.mode.is_doc_scrape() { - debug_assert!(cx.bcx.scrape_units.contains(unit)); - - rustdoc.arg("-Zunstable-options"); - - rustdoc - .arg("--scrape-examples-output-path") - .arg(scrape_output_path(unit)?); - - // Only scrape example for items from crates in the workspace, to reduce generated file size - for pkg in cx.bcx.ws.members() { - let names = pkg - .targets() - .iter() - .map(|target| target.crate_name()) - .collect::>(); - for name in names { - rustdoc.arg("--scrape-examples-target-crate").arg(name); - } - } - } else if cx.bcx.scrape_units.len() > 0 && cx.bcx.ws.is_member(&unit.pkg) { - // We only pass scraped examples to packages in the workspace - // since examples are only coming from reverse-dependencies of workspace packages - - rustdoc.arg("-Zunstable-options"); - - for scrape_unit in &cx.bcx.scrape_units { - rustdoc - .arg("--with-examples") - .arg(scrape_output_path(scrape_unit)?); - } - } - - build_deps_args(&mut rustdoc, cx, unit)?; - rustdoc::add_root_urls(cx, unit, &mut rustdoc)?; - - rustdoc.args(bcx.rustdocflags_args(unit)); - - if !crate_version_flag_already_present(&rustdoc) { - append_crate_version_flag(unit, &mut rustdoc); - } - - let name = unit.pkg.name().to_string(); - let build_script_outputs = Arc::clone(&cx.build_script_outputs); - let package_id = unit.pkg.package_id(); - let manifest_path = PathBuf::from(unit.pkg.manifest_path()); - let target = Target::clone(&unit.target); - let mut output_options = OutputOptions::new(cx, unit); - let script_metadata = cx.find_build_script_metadata(unit); - Ok(Work::new(move |state| { - if let Some(script_metadata) = script_metadata { - if let Some(output) = build_script_outputs.lock().unwrap().get(script_metadata) { - for cfg in output.cfgs.iter() { - rustdoc.arg("--cfg").arg(cfg); - } - for &(ref name, ref value) in output.env.iter() { - rustdoc.env(name, value); - } - } - } - let crate_dir = doc_dir.join(&crate_name); - if crate_dir.exists() { - // Remove output from a previous build. This ensures that stale - // files for removed items are removed. 
- log::debug!("removing pre-existing doc directory {:?}", crate_dir); - paths::remove_dir_all(crate_dir)?; - } - state.running(&rustdoc); - - rustdoc - .exec_with_streaming( - &mut |line| on_stdout_line(state, line, package_id, &target), - &mut |line| { - on_stderr_line( - state, - line, - package_id, - &manifest_path, - &target, - &mut output_options, - ) - }, - false, - ) - .with_context(|| format!("could not document `{}`", name))?; - Ok(()) - })) -} - -// The --crate-version flag could have already been passed in RUSTDOCFLAGS -// or as an extra compiler argument for rustdoc -fn crate_version_flag_already_present(rustdoc: &ProcessBuilder) -> bool { - rustdoc.get_args().iter().any(|flag| { - flag.to_str() - .map_or(false, |flag| flag.starts_with(RUSTDOC_CRATE_VERSION_FLAG)) - }) -} - -fn append_crate_version_flag(unit: &Unit, rustdoc: &mut ProcessBuilder) { - rustdoc - .arg(RUSTDOC_CRATE_VERSION_FLAG) - .arg(unit.pkg.version().to_string()); -} - -fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuilder) { - // If this is an upstream dep we don't want warnings from, turn off all - // lints. - if !unit.show_warnings(bcx.config) { - cmd.arg("--cap-lints").arg("allow"); - - // If this is an upstream dep but we *do* want warnings, make sure that they - // don't fail compilation. - } else if !unit.is_local() { - cmd.arg("--cap-lints").arg("warn"); - } -} - -/// Forward -Zallow-features if it is set for cargo. -fn add_allow_features(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { - if let Some(allow) = &cx.bcx.config.cli_unstable().allow_features { - let mut arg = String::from("-Zallow-features="); - let _ = iter_join_onto(&mut arg, allow, ","); - cmd.arg(&arg); - } -} - -/// Add error-format flags to the command. -/// -/// Cargo always uses JSON output. This has several benefits, such as being -/// easier to parse, handles changing formats (for replaying cached messages), -/// ensures atomic output (so messages aren't interleaved), allows for -/// intercepting messages like rmeta artifacts, etc. rustc includes a -/// "rendered" field in the JSON message with the message properly formatted, -/// which Cargo will extract and display to the user. -fn add_error_format_and_color( - cx: &Context<'_, '_>, - cmd: &mut ProcessBuilder, - unit: &Unit, - pipelined: bool, -) { - cmd.arg("--error-format=json"); - let mut json = String::from("--json=diagnostic-rendered-ansi"); - if pipelined { - // Pipelining needs to know when rmeta files are finished. Tell rustc - // to emit a message that cargo will intercept. - json.push_str(",artifacts"); - } - if cx - .bcx - .target_data - .info(unit.kind) - .supports_json_future_incompat - { - // Emit a future-incompat report (when supported by rustc), so we can report - // future-incompat dependencies to the user - json.push_str(",future-incompat"); - } - - match cx.bcx.build_config.message_format { - MessageFormat::Short | MessageFormat::Json { short: true, .. } => { - json.push_str(",diagnostic-short"); - } - _ => {} - } - cmd.arg(json); - - let config = cx.bcx.config; - if config.nightly_features_allowed { - match ( - config.cli_unstable().terminal_width, - config.shell().err_width().diagnostic_terminal_width(), - ) { - // Terminal width explicitly provided - only useful for testing. - (Some(Some(width)), _) => { - cmd.arg(format!("-Zterminal-width={}", width)); - } - // Terminal width was not explicitly provided but flag was provided - common case. 
- (Some(None), Some(width)) => { - cmd.arg(format!("-Zterminal-width={}", width)); - } - // User didn't opt-in. - _ => (), - } - } -} - -fn build_base_args( - cx: &mut Context<'_, '_>, - cmd: &mut ProcessBuilder, - unit: &Unit, - crate_types: &[CrateType], -) -> CargoResult<()> { - assert!(!unit.mode.is_run_custom_build()); - - let bcx = cx.bcx; - let Profile { - ref opt_level, - codegen_backend, - codegen_units, - debuginfo, - debug_assertions, - split_debuginfo, - overflow_checks, - rpath, - ref panic, - incremental, - strip, - .. - } = unit.profile; - let test = unit.mode.is_any_test(); - - cmd.arg("--crate-name").arg(&unit.target.crate_name()); - - let edition = unit.target.edition(); - edition.cmd_edition_arg(cmd); - - add_path_args(bcx.ws, unit, cmd); - add_error_format_and_color(cx, cmd, unit, cx.rmeta_required(unit)); - add_allow_features(cx, cmd); - - let mut contains_dy_lib = false; - if !test { - let mut crate_types = &crate_types - .iter() - .map(|t| t.as_str().to_string()) - .collect::>(); - if let Some(types) = cx.bcx.rustc_crate_types_args_for(unit) { - crate_types = types; - } - for crate_type in crate_types.iter() { - cmd.arg("--crate-type").arg(crate_type); - if crate_type == CrateType::Dylib.as_str() { - contains_dy_lib = true; - } - } - } - - if unit.mode.is_check() { - cmd.arg("--emit=dep-info,metadata"); - } else if !unit.requires_upstream_objects() { - // Always produce metadata files for rlib outputs. Metadata may be used - // in this session for a pipelined compilation, or it may be used in a - // future Cargo session as part of a pipelined compile. - cmd.arg("--emit=dep-info,metadata,link"); - } else { - cmd.arg("--emit=dep-info,link"); - } - - let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build()) - || (contains_dy_lib && !cx.is_primary_package(unit)); - if prefer_dynamic { - cmd.arg("-C").arg("prefer-dynamic"); - } - - if opt_level.as_str() != "0" { - cmd.arg("-C").arg(&format!("opt-level={}", opt_level)); - } - - if *panic != PanicStrategy::Unwind { - cmd.arg("-C").arg(format!("panic={}", panic)); - } - - cmd.args(<o_args(cx, unit)); - - // This is generally just an optimization on build time so if we don't pass - // it then it's ok. As of the time of this writing it's a very new flag, so - // we need to dynamically check if it's available. - if cx.bcx.target_data.info(unit.kind).supports_split_debuginfo { - if let Some(split) = split_debuginfo { - cmd.arg("-C").arg(format!("split-debuginfo={}", split)); - } - } - - if let Some(backend) = codegen_backend { - cmd.arg("-Z").arg(&format!("codegen-backend={}", backend)); - } - - if let Some(n) = codegen_units { - cmd.arg("-C").arg(&format!("codegen-units={}", n)); - } - - if let Some(debuginfo) = debuginfo { - cmd.arg("-C").arg(format!("debuginfo={}", debuginfo)); - } - - if let Some(args) = cx.bcx.extra_args_for(unit) { - cmd.args(args); - } - - // `-C overflow-checks` is implied by the setting of `-C debug-assertions`, - // so we only need to provide `-C overflow-checks` if it differs from - // the value of `-C debug-assertions` we would provide. 
- if opt_level.as_str() != "0" { - if debug_assertions { - cmd.args(&["-C", "debug-assertions=on"]); - if !overflow_checks { - cmd.args(&["-C", "overflow-checks=off"]); - } - } else if overflow_checks { - cmd.args(&["-C", "overflow-checks=on"]); - } - } else if !debug_assertions { - cmd.args(&["-C", "debug-assertions=off"]); - if overflow_checks { - cmd.args(&["-C", "overflow-checks=on"]); - } - } else if !overflow_checks { - cmd.args(&["-C", "overflow-checks=off"]); - } - - if test && unit.target.harness() { - cmd.arg("--test"); - - // Cargo has historically never compiled `--test` binaries with - // `panic=abort` because the `test` crate itself didn't support it. - // Support is now upstream, however, but requires an unstable flag to be - // passed when compiling the test. We require, in Cargo, an unstable - // flag to pass to rustc, so register that here. Eventually this flag - // will simply not be needed when the behavior is stabilized in the Rust - // compiler itself. - if *panic == PanicStrategy::Abort { - cmd.arg("-Z").arg("panic-abort-tests"); - } - } else if test { - cmd.arg("--cfg").arg("test"); - } - - for feat in &unit.features { - cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); - } - - let meta = cx.files().metadata(unit); - cmd.arg("-C").arg(&format!("metadata={}", meta)); - if cx.files().use_extra_filename(unit) { - cmd.arg("-C").arg(&format!("extra-filename=-{}", meta)); - } - - if rpath { - cmd.arg("-C").arg("rpath"); - } - - cmd.arg("--out-dir").arg(&cx.files().out_dir(unit)); - - fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) { - if let Some(val) = val { - let mut joined = OsString::from(prefix); - joined.push(val); - cmd.arg(key).arg(joined); - } - } - - if let CompileKind::Target(n) = unit.kind { - cmd.arg("--target").arg(n.rustc_target()); - } - - opt( - cmd, - "-C", - "linker=", - bcx.linker(unit.kind).as_ref().map(|s| s.as_ref()), - ); - if incremental { - let dir = cx.files().layout(unit.kind).incremental().as_os_str(); - opt(cmd, "-C", "incremental=", Some(dir)); - } - - if strip != Strip::None { - cmd.arg("-C").arg(format!("strip={}", strip)); - } - - if unit.is_std { - // -Zforce-unstable-if-unmarked prevents the accidental use of - // unstable crates within the sysroot (such as "extern crate libc" or - // any non-public crate in the sysroot). - // - // RUSTC_BOOTSTRAP allows unstable features on stable. - cmd.arg("-Z") - .arg("force-unstable-if-unmarked") - .env("RUSTC_BOOTSTRAP", "1"); - } - - // Add `CARGO_BIN_` environment variables for building tests. 
- if unit.target.is_test() || unit.target.is_bench() { - for bin_target in unit - .pkg - .manifest() - .targets() - .iter() - .filter(|target| target.is_bin()) - { - let exe_path = cx - .files() - .bin_link_for_target(bin_target, unit.kind, cx.bcx)?; - let name = bin_target - .binary_filename() - .unwrap_or(bin_target.name().to_string()); - let key = format!("CARGO_BIN_EXE_{}", name); - cmd.env(&key, exe_path); - } - } - Ok(()) -} - -fn lto_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec { - let mut result = Vec::new(); - let mut push = |arg: &str| { - result.push(OsString::from("-C")); - result.push(OsString::from(arg)); - }; - match cx.lto[unit] { - lto::Lto::Run(None) => push("lto"), - lto::Lto::Run(Some(s)) => push(&format!("lto={}", s)), - lto::Lto::Off => { - push("lto=off"); - push("embed-bitcode=no"); - } - lto::Lto::ObjectAndBitcode => {} // this is rustc's default - lto::Lto::OnlyBitcode => push("linker-plugin-lto"), - lto::Lto::OnlyObject => push("embed-bitcode=no"), - } - result -} - -fn build_deps_args( - cmd: &mut ProcessBuilder, - cx: &mut Context<'_, '_>, - unit: &Unit, -) -> CargoResult<()> { - let bcx = cx.bcx; - cmd.arg("-L").arg(&{ - let mut deps = OsString::from("dependency="); - deps.push(cx.files().deps_dir(unit)); - deps - }); - - // Be sure that the host path is also listed. This'll ensure that proc macro - // dependencies are correctly found (for reexported macros). - if !unit.kind.is_host() { - cmd.arg("-L").arg(&{ - let mut deps = OsString::from("dependency="); - deps.push(cx.files().host_deps()); - deps - }); - } - - let deps = cx.unit_deps(unit); - - // If there is not one linkable target but should, rustc fails later - // on if there is an `extern crate` for it. This may turn into a hard - // error in the future (see PR #4797). - if !deps - .iter() - .any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.is_linkable()) - { - if let Some(dep) = deps - .iter() - .find(|dep| !dep.unit.mode.is_doc() && dep.unit.target.is_lib()) - { - bcx.config.shell().warn(format!( - "The package `{}` \ - provides no linkable target. The compiler might raise an error while compiling \ - `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \ - Cargo.toml. This warning might turn into a hard error in the future.", - dep.unit.target.crate_name(), - unit.target.crate_name(), - dep.unit.target.crate_name() - ))?; - } - } - - let mut unstable_opts = false; - - for dep in deps { - if dep.unit.mode.is_run_custom_build() { - cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep.unit)); - } - } - - for arg in extern_args(cx, unit, &mut unstable_opts)? { - cmd.arg(arg); - } - - // This will only be set if we're already using a feature - // requiring nightly rust - if unstable_opts { - cmd.arg("-Z").arg("unstable-options"); - } - - Ok(()) -} - -/// Generates a list of `--extern` arguments. -pub fn extern_args( - cx: &Context<'_, '_>, - unit: &Unit, - unstable_opts: &mut bool, -) -> CargoResult> { - let mut result = Vec::new(); - let deps = cx.unit_deps(unit); - - // Closure to add one dependency to `result`. 
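Before the `link_to` closure announced in the comment above, a brief aside on `lto_args`: it maps Cargo's internal LTO setting onto `-C` flags. A standalone sketch of that mapping, with a local `Lto` enum standing in for cargo's `lto::Lto` (illustrative only):

```rust
// Illustrative stand-in for cargo's internal `lto::Lto` setting.
enum Lto {
    Run(Option<String>), // "fat" LTO, or a named variant such as "thin"
    Off,
    ObjectAndBitcode,
    OnlyBitcode,
    OnlyObject,
}

/// Sketch of the `-C` values produced for each setting (mirrors the match above).
fn lto_flags(lto: &Lto) -> Vec<String> {
    match lto {
        Lto::Run(None) => vec!["lto".to_string()],
        Lto::Run(Some(s)) => vec![format!("lto={}", s)],
        Lto::Off => vec!["lto=off".to_string(), "embed-bitcode=no".to_string()],
        Lto::ObjectAndBitcode => vec![], // rustc's default, nothing to pass
        Lto::OnlyBitcode => vec!["linker-plugin-lto".to_string()],
        Lto::OnlyObject => vec!["embed-bitcode=no".to_string()],
    }
}

fn main() {
    assert_eq!(lto_flags(&Lto::Run(None)), ["lto"]);
    assert_eq!(lto_flags(&Lto::Run(Some("thin".into()))), ["lto=thin"]);
    assert_eq!(lto_flags(&Lto::Off), ["lto=off", "embed-bitcode=no"]);
    assert!(lto_flags(&Lto::ObjectAndBitcode).is_empty());
    assert_eq!(lto_flags(&Lto::OnlyBitcode), ["linker-plugin-lto"]);
    assert_eq!(lto_flags(&Lto::OnlyObject), ["embed-bitcode=no"]);
}
```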
- let mut link_to = - |dep: &UnitDep, extern_crate_name: InternedString, noprelude: bool| -> CargoResult<()> { - let mut value = OsString::new(); - let mut opts = Vec::new(); - if unit - .pkg - .manifest() - .unstable_features() - .require(Feature::public_dependency()) - .is_ok() - && !dep.public - { - opts.push("priv"); - *unstable_opts = true; - } - if noprelude { - opts.push("noprelude"); - *unstable_opts = true; - } - if !opts.is_empty() { - value.push(opts.join(",")); - value.push(":"); - } - value.push(extern_crate_name.as_str()); - value.push("="); - - let mut pass = |file| { - let mut value = value.clone(); - value.push(file); - result.push(OsString::from("--extern")); - result.push(value); - }; - - let outputs = cx.outputs(&dep.unit)?; - - if cx.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() { - // Example: rlib dependency for an rlib, rmeta is all that is required. - let output = outputs - .iter() - .find(|output| output.flavor == FileFlavor::Rmeta) - .expect("failed to find rmeta dep for pipelined dep"); - pass(&output.path); - } else { - // Example: a bin needs `rlib` for dependencies, it cannot use rmeta. - for output in outputs.iter() { - if output.flavor == FileFlavor::Linkable { - pass(&output.path); - } - } - } - Ok(()) - }; - - for dep in deps { - if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() { - link_to(dep, dep.extern_crate_name, dep.noprelude)?; - } - } - if unit.target.proc_macro() { - // Automatically import `proc_macro`. - result.push(OsString::from("--extern")); - result.push(OsString::from("proc_macro")); - } - - Ok(result) -} - -fn envify(s: &str) -> String { - s.chars() - .flat_map(|c| c.to_uppercase()) - .map(|c| if c == '-' { '_' } else { c }) - .collect() -} - -struct OutputOptions { - /// What format we're emitting from Cargo itself. - format: MessageFormat, - /// Look for JSON message that indicates .rmeta file is available for - /// pipelined compilation. - look_for_metadata_directive: bool, - /// Whether or not to display messages in color. - color: bool, - /// Where to write the JSON messages to support playback later if the unit - /// is fresh. The file is created lazily so that in the normal case, lots - /// of empty files are not created. If this is None, the output will not - /// be cached (such as when replaying cached messages). - cache_cell: Option<(PathBuf, LazyCell)>, - /// If `true`, display any diagnostics. - /// Other types of JSON messages are processed regardless - /// of the value of this flag. - /// - /// This is used primarily for cache replay. If you build with `-vv`, the - /// cache will be filled with diagnostics from dependencies. When the - /// cache is replayed without `-vv`, we don't want to show them. - show_diagnostics: bool, - warnings_seen: usize, - errors_seen: usize, -} - -impl OutputOptions { - fn new(cx: &Context<'_, '_>, unit: &Unit) -> OutputOptions { - let look_for_metadata_directive = cx.rmeta_required(unit); - let color = cx.bcx.config.shell().err_supports_color(); - let path = cx.files().message_cache_path(unit); - // Remove old cache, ignore ENOENT, which is the common case. 
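As an aside before `OutputOptions::new` continues below: the `link_to` closure just shown assembles `--extern` values of the form `[opts:]name=path`, where `opts` may contain `priv` and `noprelude` (both gated behind `-Z unstable-options`). A minimal sketch of that string construction with an invented helper:

```rust
use std::path::Path;

/// Sketch of how an `--extern` value is assembled (illustrative only).
/// `private` and `noprelude` correspond to the `priv` / `noprelude` options,
/// both of which require `-Z unstable-options`.
fn extern_value(name: &str, path: &Path, private: bool, noprelude: bool) -> String {
    let mut opts = Vec::new();
    if private {
        opts.push("priv");
    }
    if noprelude {
        opts.push("noprelude");
    }
    let prefix = if opts.is_empty() {
        String::new()
    } else {
        format!("{}:", opts.join(","))
    };
    format!("{}{}={}", prefix, name, path.display())
}

fn main() {
    let p = Path::new("target/debug/deps/libserde-abc123.rmeta");
    assert_eq!(
        extern_value("serde", p, false, false),
        "serde=target/debug/deps/libserde-abc123.rmeta"
    );
    assert_eq!(
        extern_value("serde", p, true, true),
        "priv,noprelude:serde=target/debug/deps/libserde-abc123.rmeta"
    );
}
```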
- drop(fs::remove_file(&path)); - let cache_cell = Some((path, LazyCell::new())); - OutputOptions { - format: cx.bcx.build_config.message_format, - look_for_metadata_directive, - color, - cache_cell, - show_diagnostics: true, - warnings_seen: 0, - errors_seen: 0, - } - } -} - -fn on_stdout_line( - state: &JobState<'_, '_>, - line: &str, - _package_id: PackageId, - _target: &Target, -) -> CargoResult<()> { - state.stdout(line.to_string())?; - Ok(()) -} - -fn on_stderr_line( - state: &JobState<'_, '_>, - line: &str, - package_id: PackageId, - manifest_path: &std::path::Path, - target: &Target, - options: &mut OutputOptions, -) -> CargoResult<()> { - if on_stderr_line_inner(state, line, package_id, manifest_path, target, options)? { - // Check if caching is enabled. - if let Some((path, cell)) = &mut options.cache_cell { - // Cache the output, which will be replayed later when Fresh. - let f = cell.try_borrow_mut_with(|| paths::create(path))?; - debug_assert!(!line.contains('\n')); - f.write_all(line.as_bytes())?; - f.write_all(&[b'\n'])?; - } - } - Ok(()) -} - -/// Returns true if the line should be cached. -fn on_stderr_line_inner( - state: &JobState<'_, '_>, - line: &str, - package_id: PackageId, - manifest_path: &std::path::Path, - target: &Target, - options: &mut OutputOptions, -) -> CargoResult { - // We primarily want to use this function to process JSON messages from - // rustc. The compiler should always print one JSON message per line, and - // otherwise it may have other output intermingled (think RUST_LOG or - // something like that), so skip over everything that doesn't look like a - // JSON message. - if !line.starts_with('{') { - state.stderr(line.to_string())?; - return Ok(true); - } - - let mut compiler_message: Box = match serde_json::from_str(line) { - Ok(msg) => msg, - - // If the compiler produced a line that started with `{` but it wasn't - // valid JSON, maybe it wasn't JSON in the first place! Forward it along - // to stderr. - Err(e) => { - debug!("failed to parse json: {:?}", e); - state.stderr(line.to_string())?; - return Ok(true); - } - }; - - let count_diagnostic = |level, options: &mut OutputOptions| { - if level == "warning" { - options.warnings_seen += 1; - } else if level == "error" { - options.errors_seen += 1; - } - }; - - if let Ok(report) = serde_json::from_str::(compiler_message.get()) { - for item in &report.future_incompat_report { - count_diagnostic(&*item.diagnostic.level, options); - } - state.future_incompat_report(report.future_incompat_report); - return Ok(true); - } - - // Depending on what we're emitting from Cargo itself, we figure out what to - // do with this JSON message. - match options.format { - // In the "human" output formats (human/short) or if diagnostic messages - // from rustc aren't being included in the output of Cargo's JSON - // messages then we extract the diagnostic (if present) here and handle - // it ourselves. - MessageFormat::Human - | MessageFormat::Short - | MessageFormat::Json { - render_diagnostics: true, - .. - } => { - #[derive(serde::Deserialize)] - struct CompilerMessage { - rendered: String, - message: String, - level: String, - } - if let Ok(mut msg) = serde_json::from_str::(compiler_message.get()) { - if msg.message.starts_with("aborting due to") - || msg.message.ends_with("warning emitted") - || msg.message.ends_with("warnings emitted") - { - // Skip this line; we'll print our own summary at the end. 
- return Ok(true); - } - // state.stderr will add a newline - if msg.rendered.ends_with('\n') { - msg.rendered.pop(); - } - let rendered = if options.color { - msg.rendered - } else { - // Strip only fails if the the Writer fails, which is Cursor - // on a Vec, which should never fail. - strip_ansi_escapes::strip(&msg.rendered) - .map(|v| String::from_utf8(v).expect("utf8")) - .expect("strip should never fail") - }; - if options.show_diagnostics { - count_diagnostic(&msg.level, options); - state.emit_diag(msg.level, rendered)?; - } - return Ok(true); - } - } - - // Remove color information from the rendered string if color is not - // enabled. Cargo always asks for ANSI colors from rustc. This allows - // cached replay to enable/disable colors without re-invoking rustc. - MessageFormat::Json { ansi: false, .. } => { - #[derive(serde::Deserialize, serde::Serialize)] - struct CompilerMessage { - rendered: String, - #[serde(flatten)] - other: std::collections::BTreeMap, - } - if let Ok(mut error) = serde_json::from_str::(compiler_message.get()) { - error.rendered = strip_ansi_escapes::strip(&error.rendered) - .map(|v| String::from_utf8(v).expect("utf8")) - .unwrap_or(error.rendered); - let new_line = serde_json::to_string(&error)?; - let new_msg: Box = serde_json::from_str(&new_line)?; - compiler_message = new_msg; - } - } - - // If ansi colors are desired then we should be good to go! We can just - // pass through this message as-is. - MessageFormat::Json { ansi: true, .. } => {} - } - - // In some modes of execution we will execute rustc with `-Z - // emit-artifact-notifications` to look for metadata files being produced. When this - // happens we may be able to start subsequent compilations more quickly than - // waiting for an entire compile to finish, possibly using more parallelism - // available to complete a compilation session more quickly. - // - // In these cases look for a matching directive and inform Cargo internally - // that a metadata file has been produced. - if options.look_for_metadata_directive { - #[derive(serde::Deserialize)] - struct ArtifactNotification { - artifact: String, - } - if let Ok(artifact) = serde_json::from_str::(compiler_message.get()) { - log::trace!("found directive from rustc: `{}`", artifact.artifact); - if artifact.artifact.ends_with(".rmeta") { - log::debug!("looks like metadata finished early!"); - state.rmeta_produced(); - } - return Ok(false); - } - } - - #[derive(serde::Deserialize)] - struct JobserverNotification { - jobserver_event: Event, - } - - #[derive(Debug, serde::Deserialize)] - enum Event { - WillAcquire, - Release, - } - - if let Ok(JobserverNotification { jobserver_event }) = - serde_json::from_str::(compiler_message.get()) - { - log::info!( - "found jobserver directive from rustc: `{:?}`", - jobserver_event - ); - match jobserver_event { - Event::WillAcquire => state.will_acquire(), - Event::Release => state.release_token(), - } - return Ok(false); - } - - // And failing all that above we should have a legitimate JSON diagnostic - // from the compiler, so wrap it in an external Cargo JSON message - // indicating which package it came from and then emit it. 
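The wrapping described in the comment above continues just below. Condensed, the whole stderr handler amounts to: forward anything that does not look like JSON, and otherwise inspect marker fields such as `level` to decide how to surface the message. A simplified standalone sketch, assuming a `serde_json` dependency; the `classify` helper and the sample lines are invented:

```rust
use serde_json::Value;

/// What to do with one line of rustc stderr (simplified; illustrative only).
enum Line {
    /// Not JSON (e.g. RUST_LOG output): forward to the user's stderr as-is.
    Plain(String),
    /// A JSON diagnostic with its `level` field, to be counted and rendered.
    Diagnostic { level: String },
    /// JSON that is not a diagnostic (artifact or jobserver notifications, ...).
    OtherJson(Value),
}

fn classify(line: &str) -> Line {
    if !line.starts_with('{') {
        return Line::Plain(line.to_string());
    }
    match serde_json::from_str::<Value>(line) {
        // Lines starting with `{` can still be non-JSON noise; forward those too.
        Err(_) => Line::Plain(line.to_string()),
        Ok(v) => match v.get("level").and_then(Value::as_str) {
            Some(level) => Line::Diagnostic { level: level.to_string() },
            None => Line::OtherJson(v),
        },
    }
}

fn main() {
    let mut warnings = 0usize;
    let lines = [
        "warning: something odd (plain text)",
        r#"{"level":"warning","message":"unused variable","rendered":"warning: unused variable\n"}"#,
        r#"{"artifact":"/tmp/libfoo.rmeta","emit":"metadata"}"#,
    ];
    for l in lines {
        match classify(l) {
            Line::Plain(s) => eprintln!("{}", s),
            Line::Diagnostic { level } => {
                if level == "warning" {
                    warnings += 1;
                }
            }
            Line::OtherJson(_) => {}
        }
    }
    assert_eq!(warnings, 1);
}
```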
- - if !options.show_diagnostics { - return Ok(true); - } - - #[derive(serde::Deserialize)] - struct CompilerMessage { - level: String, - } - if let Ok(message) = serde_json::from_str::(compiler_message.get()) { - count_diagnostic(&message.level, options); - } - - let msg = machine_message::FromCompiler { - package_id, - manifest_path, - target, - message: compiler_message, - } - .to_json_string(); - - // Switch json lines from rustc/rustdoc that appear on stderr to stdout - // instead. We want the stdout of Cargo to always be machine parseable as - // stderr has our colorized human-readable messages. - state.stdout(msg)?; - Ok(true) -} - -fn replay_output_cache( - package_id: PackageId, - manifest_path: PathBuf, - target: &Target, - path: PathBuf, - format: MessageFormat, - color: bool, - show_diagnostics: bool, -) -> Work { - let target = target.clone(); - let mut options = OutputOptions { - format, - look_for_metadata_directive: true, - color, - cache_cell: None, - show_diagnostics, - warnings_seen: 0, - errors_seen: 0, - }; - Work::new(move |state| { - if !path.exists() { - // No cached output, probably didn't emit anything. - return Ok(()); - } - // We sometimes have gigabytes of output from the compiler, so avoid - // loading it all into memory at once, as that can cause OOM where - // otherwise there would be none. - let file = paths::open(&path)?; - let mut reader = std::io::BufReader::new(file); - let mut line = String::new(); - loop { - let length = reader.read_line(&mut line)?; - if length == 0 { - break; - } - let trimmed = line.trim_end_matches(&['\n', '\r'][..]); - on_stderr_line( - state, - trimmed, - package_id, - &manifest_path, - &target, - &mut options, - )?; - line.clear(); - } - Ok(()) - }) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/output_depinfo.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/output_depinfo.rs deleted file mode 100644 index 66596bb18..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/output_depinfo.rs +++ /dev/null @@ -1,163 +0,0 @@ -//! Module for generating dep-info files. -//! -//! `rustc` generates a dep-info file with a `.d` extension at the same -//! location of the output artifacts as a result of using `--emit=dep-info`. -//! This dep-info file is a Makefile-like syntax that indicates the -//! dependencies needed to build the artifact. Example: -//! -//! ```makefile -//! /path/to/target/debug/deps/cargo-b6219d178925203d: src/bin/main.rs src/bin/cargo/cli.rs # โ€ฆ etc. -//! ``` -//! -//! The fingerprint module has code to parse these files, and stores them as -//! binary format in the fingerprint directory. These are used to quickly scan -//! for any changed files. -//! -//! On top of all this, Cargo emits its own dep-info files in the output -//! directory. This is done for every "uplifted" artifact. These are intended -//! to be used with external build systems so that they can detect if Cargo -//! needs to be re-executed. It includes all the entries from the `rustc` -//! dep-info file, and extends it with any `rerun-if-changed` entries from -//! build scripts. It also includes sources from any path dependencies. Registry -//! dependencies are not included under the assumption that changes to them can -//! be detected via changes to `Cargo.lock`. 
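The module documentation above describes the Makefile-style `.d` files Cargo writes next to uplifted artifacts. A minimal sketch of rendering one such line, including the space escaping performed by `render_filename` (the file names here are invented):

```rust
use std::io::Write;

/// Escape spaces the way Makefile-style dep-info expects (`foo bar` -> `foo\ bar`).
fn render_filename(path: &str) -> String {
    path.replace(' ', "\\ ")
}

/// Write one `target: dep dep ...` line for an artifact and its source files.
fn write_dep_info(mut out: impl Write, target: &str, deps: &[&str]) -> std::io::Result<()> {
    write!(out, "{}:", render_filename(target))?;
    for dep in deps {
        write!(out, " {}", render_filename(dep))?;
    }
    writeln!(out)
}

fn main() -> std::io::Result<()> {
    let mut buf = Vec::new();
    write_dep_info(
        &mut buf,
        "target/debug/my-bin",
        &["src/main.rs", "src/some file.rs", "build.rs"],
    )?;
    assert_eq!(
        String::from_utf8(buf).unwrap(),
        "target/debug/my-bin: src/main.rs src/some\\ file.rs build.rs\n"
    );
    Ok(())
}
```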
- -use std::collections::{BTreeSet, HashSet}; -use std::io::{BufWriter, Write}; -use std::path::{Path, PathBuf}; - -use super::{fingerprint, Context, FileFlavor, Unit}; -use crate::util::{internal, CargoResult}; -use cargo_util::paths; -use log::debug; - -fn render_filename>(path: P, basedir: Option<&str>) -> CargoResult { - let path = path.as_ref(); - let relpath = match basedir { - None => path, - Some(base) => match path.strip_prefix(base) { - Ok(relpath) => relpath, - _ => path, - }, - }; - relpath - .to_str() - .ok_or_else(|| internal(format!("path `{:?}` not utf-8", relpath))) - .map(|f| f.replace(" ", "\\ ")) -} - -fn add_deps_for_unit( - deps: &mut BTreeSet, - cx: &mut Context<'_, '_>, - unit: &Unit, - visited: &mut HashSet, -) -> CargoResult<()> { - if !visited.insert(unit.clone()) { - return Ok(()); - } - - // units representing the execution of a build script don't actually - // generate a dep info file, so we just keep on going below - if !unit.mode.is_run_custom_build() { - // Add dependencies from rustc dep-info output (stored in fingerprint directory) - let dep_info_loc = fingerprint::dep_info_loc(cx, unit); - if let Some(paths) = - fingerprint::parse_dep_info(unit.pkg.root(), cx.files().host_root(), &dep_info_loc)? - { - for path in paths.files { - deps.insert(path); - } - } else { - debug!( - "can't find dep_info for {:?} {}", - unit.pkg.package_id(), - unit.target - ); - return Err(internal("dep_info missing")); - } - } - - // Add rerun-if-changed dependencies - if let Some(metadata) = cx.find_build_script_metadata(unit) { - if let Some(output) = cx.build_script_outputs.lock().unwrap().get(metadata) { - for path in &output.rerun_if_changed { - // The paths we have saved from the unit are of arbitrary relativeness and may be - // relative to the crate root of the dependency. - let path = unit.pkg.root().join(path); - deps.insert(path); - } - } - } - - // Recursively traverse all transitive dependencies - let unit_deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow. - for dep in unit_deps { - if dep.unit.is_local() { - add_deps_for_unit(deps, cx, &dep.unit, visited)?; - } - } - Ok(()) -} - -/// Save a `.d` dep-info file for the given unit. -/// -/// This only saves files for uplifted artifacts. -pub fn output_depinfo(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<()> { - let bcx = cx.bcx; - let mut deps = BTreeSet::new(); - let mut visited = HashSet::new(); - let success = add_deps_for_unit(&mut deps, cx, unit, &mut visited).is_ok(); - let basedir_string; - let basedir = match bcx.config.build_config()?.dep_info_basedir.clone() { - Some(value) => { - basedir_string = value - .resolve_path(bcx.config) - .as_os_str() - .to_str() - .ok_or_else(|| anyhow::format_err!("build.dep-info-basedir path not utf-8"))? - .to_string(); - Some(basedir_string.as_str()) - } - None => None, - }; - let deps = deps - .iter() - .map(|f| render_filename(f, basedir)) - .collect::>>()?; - - for output in cx - .outputs(unit)? 
- .iter() - .filter(|o| !matches!(o.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary)) - { - if let Some(ref link_dst) = output.hardlink { - let output_path = link_dst.with_extension("d"); - if success { - let target_fn = render_filename(link_dst, basedir)?; - - // If nothing changed don't recreate the file which could alter - // its mtime - if let Ok(previous) = fingerprint::parse_rustc_dep_info(&output_path) { - if previous.files.iter().eq(deps.iter().map(Path::new)) { - continue; - } - } - - // Otherwise write it all out - let mut outfile = BufWriter::new(paths::create(output_path)?); - write!(outfile, "{}:", target_fn)?; - for dep in &deps { - write!(outfile, " {}", dep)?; - } - writeln!(outfile)?; - - // dep-info generation failed, so delete output file. This will - // usually cause the build system to always rerun the build - // rule, which is correct if inefficient. - } else if output_path.exists() { - paths::remove_file(output_path)?; - } - } - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/rustdoc.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/rustdoc.rs deleted file mode 100644 index d9244404a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/rustdoc.rs +++ /dev/null @@ -1,194 +0,0 @@ -//! Utilities for building with rustdoc. - -use crate::core::compiler::context::Context; -use crate::core::compiler::unit::Unit; -use crate::core::compiler::CompileKind; -use crate::sources::CRATES_IO_REGISTRY; -use crate::util::errors::{internal, CargoResult}; -use cargo_util::ProcessBuilder; -use std::collections::HashMap; -use std::fmt; -use std::hash; -use url::Url; - -const DOCS_RS_URL: &'static str = "https://docs.rs/"; - -/// Mode used for `std`. -#[derive(Debug, Hash)] -pub enum RustdocExternMode { - /// Use a local `file://` URL. - Local, - /// Use a remote URL to (default). - Remote, - /// An arbitrary URL. 
- Url(String), -} - -impl From for RustdocExternMode { - fn from(s: String) -> RustdocExternMode { - match s.as_ref() { - "local" => RustdocExternMode::Local, - "remote" => RustdocExternMode::Remote, - _ => RustdocExternMode::Url(s), - } - } -} - -impl fmt::Display for RustdocExternMode { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - RustdocExternMode::Local => "local".fmt(f), - RustdocExternMode::Remote => "remote".fmt(f), - RustdocExternMode::Url(s) => s.fmt(f), - } - } -} - -impl<'de> serde::de::Deserialize<'de> for RustdocExternMode { - fn deserialize(deserializer: D) -> Result - where - D: serde::de::Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - Ok(s.into()) - } -} - -#[derive(serde::Deserialize, Debug)] -#[serde(default)] -pub struct RustdocExternMap { - #[serde(deserialize_with = "default_crates_io_to_docs_rs")] - pub(crate) registries: HashMap, - std: Option, -} - -impl Default for RustdocExternMap { - fn default() -> Self { - let mut registries = HashMap::new(); - registries.insert(CRATES_IO_REGISTRY.into(), DOCS_RS_URL.into()); - Self { - registries, - std: None, - } - } -} - -fn default_crates_io_to_docs_rs<'de, D: serde::Deserializer<'de>>( - de: D, -) -> Result, D::Error> { - use serde::Deserialize; - let mut registries = HashMap::deserialize(de)?; - if !registries.contains_key(CRATES_IO_REGISTRY) { - registries.insert(CRATES_IO_REGISTRY.into(), DOCS_RS_URL.into()); - } - Ok(registries) -} - -impl hash::Hash for RustdocExternMap { - fn hash(&self, into: &mut H) { - self.std.hash(into); - for (key, value) in &self.registries { - key.hash(into); - value.hash(into); - } - } -} - -pub fn add_root_urls( - cx: &Context<'_, '_>, - unit: &Unit, - rustdoc: &mut ProcessBuilder, -) -> CargoResult<()> { - let config = cx.bcx.config; - if !config.cli_unstable().rustdoc_map { - log::debug!("`doc.extern-map` ignored, requires -Zrustdoc-map flag"); - return Ok(()); - } - let map = config.doc_extern_map()?; - let mut unstable_opts = false; - // Collect mapping of registry name -> index url. 
- let name2url: HashMap<&String, Url> = map - .registries - .keys() - .filter_map(|name| { - if let Ok(index_url) = config.get_registry_index(name) { - Some((name, index_url)) - } else { - log::warn!( - "`doc.extern-map.{}` specifies a registry that is not defined", - name - ); - None - } - }) - .collect(); - for dep in cx.unit_deps(unit) { - if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() { - for (registry, location) in &map.registries { - let sid = dep.unit.pkg.package_id().source_id(); - let matches_registry = || -> bool { - if !sid.is_registry() { - return false; - } - if sid.is_default_registry() { - return registry == CRATES_IO_REGISTRY; - } - if let Some(index_url) = name2url.get(registry) { - return index_url == sid.url(); - } - false - }; - if matches_registry() { - let mut url = location.clone(); - if !url.contains("{pkg_name}") && !url.contains("{version}") { - if !url.ends_with('/') { - url.push('/'); - } - url.push_str("{pkg_name}/{version}/"); - } - let url = url - .replace("{pkg_name}", &dep.unit.pkg.name()) - .replace("{version}", &dep.unit.pkg.version().to_string()); - rustdoc.arg("--extern-html-root-url"); - rustdoc.arg(format!("{}={}", dep.unit.target.crate_name(), url)); - unstable_opts = true; - } - } - } - } - let std_url = match &map.std { - None | Some(RustdocExternMode::Remote) => None, - Some(RustdocExternMode::Local) => { - let sysroot = &cx.bcx.target_data.info(CompileKind::Host).sysroot; - let html_root = sysroot.join("share").join("doc").join("rust").join("html"); - if html_root.exists() { - let url = Url::from_file_path(&html_root).map_err(|()| { - internal(format!( - "`{}` failed to convert to URL", - html_root.display() - )) - })?; - Some(url.to_string()) - } else { - log::warn!( - "`doc.extern-map.std` is \"local\", but local docs don't appear to exist at {}", - html_root.display() - ); - None - } - } - Some(RustdocExternMode::Url(s)) => Some(s.to_string()), - }; - if let Some(url) = std_url { - for name in &["std", "core", "alloc", "proc_macro"] { - rustdoc.arg("--extern-html-root-url"); - rustdoc.arg(format!("{}={}", name, url)); - unstable_opts = true; - } - } - - if unstable_opts { - rustdoc.arg("-Zunstable-options"); - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/standard_lib.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/standard_lib.rs deleted file mode 100644 index 6b76a5681..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/standard_lib.rs +++ /dev/null @@ -1,218 +0,0 @@ -//! Code for building the standard library. - -use crate::core::compiler::UnitInterner; -use crate::core::compiler::{CompileKind, CompileMode, RustcTargetData, Unit}; -use crate::core::profiles::{Profiles, UnitFor}; -use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures}; -use crate::core::resolver::HasDevUnits; -use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace}; -use crate::ops::{self, Packages}; -use crate::util::errors::CargoResult; -use std::collections::{HashMap, HashSet}; -use std::env; -use std::path::PathBuf; - -/// Parse the `-Zbuild-std` flag. -pub fn parse_unstable_flag(value: Option<&str>) -> Vec { - // This is a temporary hack until there is a more principled way to - // declare dependencies in Cargo.toml. 
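Stepping back to the rustdoc mapping above: each `doc.extern-map` registry entry becomes an `--extern-html-root-url` flag, with `{pkg_name}` and `{version}` placeholders filled in and a default `{pkg_name}/{version}/` suffix appended when the template has neither. A standalone sketch of just the URL templating, with an invented helper and made-up values (the body of `parse_unstable_flag` introduced immediately above continues below):

```rust
/// Expand a `doc.extern-map` URL template for one dependency (illustrative only).
fn extern_html_root_url(template: &str, crate_name: &str, pkg_name: &str, version: &str) -> String {
    let mut url = template.to_string();
    // If the template has no placeholders, fall back to `<base>/{pkg_name}/{version}/`.
    if !url.contains("{pkg_name}") && !url.contains("{version}") {
        if !url.ends_with('/') {
            url.push('/');
        }
        url.push_str("{pkg_name}/{version}/");
    }
    let url = url
        .replace("{pkg_name}", pkg_name)
        .replace("{version}", version);
    format!("{}={}", crate_name, url)
}

fn main() {
    // Default crates.io mapping points at docs.rs.
    assert_eq!(
        extern_html_root_url("https://docs.rs/", "serde", "serde", "1.0.136"),
        "serde=https://docs.rs/serde/1.0.136/"
    );
    // A template with explicit placeholders is used as-is.
    assert_eq!(
        extern_html_root_url("https://docs.example.com/{pkg_name}/{version}", "serde", "serde", "1.0.136"),
        "serde=https://docs.example.com/serde/1.0.136"
    );
}
```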
- let value = value.unwrap_or("std"); - let mut crates: HashSet<&str> = value.split(',').collect(); - if crates.contains("std") { - crates.insert("core"); - crates.insert("alloc"); - crates.insert("proc_macro"); - crates.insert("panic_unwind"); - crates.insert("compiler_builtins"); - } else if crates.contains("core") { - crates.insert("compiler_builtins"); - } - crates.into_iter().map(|s| s.to_string()).collect() -} - -/// Resolve the standard library dependencies. -pub fn resolve_std<'cfg>( - ws: &Workspace<'cfg>, - target_data: &RustcTargetData<'cfg>, - requested_targets: &[CompileKind], - crates: &[String], -) -> CargoResult<(PackageSet<'cfg>, Resolve, ResolvedFeatures)> { - let src_path = detect_sysroot_src_path(target_data)?; - let to_patch = [ - "rustc-std-workspace-core", - "rustc-std-workspace-alloc", - "rustc-std-workspace-std", - ]; - let patches = to_patch - .iter() - .map(|&name| { - let source_path = SourceId::for_path(&src_path.join("library").join(name))?; - let dep = Dependency::parse(name, None, source_path)?; - Ok(dep) - }) - .collect::>>()?; - let crates_io_url = crate::sources::CRATES_IO_INDEX.parse().unwrap(); - let mut patch = HashMap::new(); - patch.insert(crates_io_url, patches); - let members = vec![ - String::from("library/std"), - String::from("library/core"), - String::from("library/alloc"), - String::from("library/test"), - ]; - let ws_config = crate::core::WorkspaceConfig::Root(crate::core::WorkspaceRootConfig::new( - &src_path, - &Some(members), - /*default_members*/ &None, - /*exclude*/ &None, - /*custom_metadata*/ &None, - )); - let virtual_manifest = crate::core::VirtualManifest::new( - /*replace*/ Vec::new(), - patch, - ws_config, - /*profiles*/ None, - crate::core::Features::default(), - None, - ); - - let config = ws.config(); - // This is a delicate hack. In order for features to resolve correctly, - // the resolver needs to run a specific "current" member of the workspace. - // Thus, in order to set the features for `std`, we need to set `libtest` - // to be the "current" member. `libtest` is the root, and all other - // standard library crates are dependencies from there. Since none of the - // other crates need to alter their features, this should be fine, for - // now. Perhaps in the future features will be decoupled from the resolver - // and it will be easier to control feature selection. - let current_manifest = src_path.join("library/test/Cargo.toml"); - // TODO: Consider doing something to enforce --locked? Or to prevent the - // lock file from being written, such as setting ephemeral. - let mut std_ws = Workspace::new_virtual(src_path, current_manifest, virtual_manifest, config)?; - // Don't require optional dependencies in this workspace, aka std's own - // `[dev-dependencies]`. No need for us to generate a `Resolve` which has - // those included because we'll never use them anyway. - std_ws.set_require_optional_deps(false); - // `test` is not in the default set because it is optional, but it needs - // to be part of the resolve in case we do need it. 
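`parse_unstable_flag` above expands the requested `-Zbuild-std` crates into the full set that actually has to be built; `resolve_std` then continues below. A standalone restatement of that expansion with a couple of worked cases:

```rust
use std::collections::HashSet;

/// Sketch of the `-Zbuild-std` crate-set expansion shown above.
fn build_std_crates(value: Option<&str>) -> HashSet<String> {
    let value = value.unwrap_or("std");
    let mut crates: HashSet<&str> = value.split(',').collect();
    if crates.contains("std") {
        crates.extend(["core", "alloc", "proc_macro", "panic_unwind", "compiler_builtins"]);
    } else if crates.contains("core") {
        crates.insert("compiler_builtins");
    }
    crates.into_iter().map(str::to_string).collect()
}

fn main() {
    // `-Zbuild-std` with no value implies the full std stack.
    let default = build_std_crates(None);
    assert!(default.contains("std") && default.contains("panic_unwind"));

    // `-Zbuild-std=core` only pulls in compiler_builtins on top of core.
    let core_only = build_std_crates(Some("core"));
    assert_eq!(core_only.len(), 2);
    assert!(core_only.contains("core") && core_only.contains("compiler_builtins"));
}
```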
- let mut spec_pkgs = Vec::from(crates); - spec_pkgs.push("test".to_string()); - let spec = Packages::Packages(spec_pkgs); - let specs = spec.to_package_id_specs(&std_ws)?; - let features = match &config.cli_unstable().build_std_features { - Some(list) => list.clone(), - None => vec![ - "panic-unwind".to_string(), - "backtrace".to_string(), - "default".to_string(), - ], - }; - let cli_features = CliFeatures::from_command_line( - &features, /*all_features*/ false, /*uses_default_features*/ false, - )?; - let resolve = ops::resolve_ws_with_opts( - &std_ws, - target_data, - requested_targets, - &cli_features, - &specs, - HasDevUnits::No, - crate::core::resolver::features::ForceAllTargets::No, - )?; - Ok(( - resolve.pkg_set, - resolve.targeted_resolve, - resolve.resolved_features, - )) -} - -/// Generate a list of root `Unit`s for the standard library. -/// -/// The given slice of crate names is the root set. -pub fn generate_std_roots( - crates: &[String], - std_resolve: &Resolve, - std_features: &ResolvedFeatures, - kinds: &[CompileKind], - package_set: &PackageSet<'_>, - interner: &UnitInterner, - profiles: &Profiles, -) -> CargoResult>> { - // Generate the root Units for the standard library. - let std_ids = crates - .iter() - .map(|crate_name| std_resolve.query(crate_name)) - .collect::>>()?; - // Convert PackageId to Package. - let std_pkgs = package_set.get_many(std_ids)?; - // Generate a map of Units for each kind requested. - let mut ret = HashMap::new(); - for pkg in std_pkgs { - let lib = pkg - .targets() - .iter() - .find(|t| t.is_lib()) - .expect("std has a lib"); - let unit_for = UnitFor::new_normal(); - // I don't think we need to bother with Check here, the difference - // in time is minimal, and the difference in caching is - // significant. - let mode = CompileMode::Build; - let features = std_features.activated_features(pkg.package_id(), FeaturesFor::NormalOrDev); - - for kind in kinds { - let list = ret.entry(*kind).or_insert_with(Vec::new); - let profile = profiles.get_profile( - pkg.package_id(), - /*is_member*/ false, - /*is_local*/ false, - unit_for, - mode, - *kind, - ); - list.push(interner.intern( - pkg, - lib, - profile, - *kind, - mode, - features.clone(), - /*is_std*/ true, - /*dep_hash*/ 0, - )); - } - } - Ok(ret) -} - -fn detect_sysroot_src_path(target_data: &RustcTargetData<'_>) -> CargoResult { - if let Some(s) = env::var_os("__CARGO_TESTS_ONLY_SRC_ROOT") { - return Ok(s.into()); - } - - // NOTE: This is temporary until we figure out how to acquire the source. - let src_path = target_data - .info(CompileKind::Host) - .sysroot - .join("lib") - .join("rustlib") - .join("src") - .join("rust"); - let lock = src_path.join("Cargo.lock"); - if !lock.exists() { - let msg = format!( - "{:?} does not exist, unable to build with the standard \ - library, try:\n rustup component add rust-src", - lock - ); - match env::var("RUSTUP_TOOLCHAIN") { - Ok(rustup_toolchain) => { - anyhow::bail!("{} --toolchain {}", msg, rustup_toolchain); - } - Err(_) => { - anyhow::bail!(msg); - } - } - } - Ok(src_path) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/timings.js b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/timings.js deleted file mode 100644 index 986070ab0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/timings.js +++ /dev/null @@ -1,470 +0,0 @@ -// Position of the vertical axis. -const X_LINE = 50; -// General-use margin size. 
-const MARGIN = 5; -// Position of the horizontal axis, relative to the bottom. -const Y_LINE = 35; -// Minimum distance between time tick labels. -const MIN_TICK_DIST = 50; -// Radius for rounded rectangle corners. -const RADIUS = 3; -// Height of unit boxes. -const BOX_HEIGHT = 25; -// Distance between Y tick marks on the unit graph. -const Y_TICK_DIST = BOX_HEIGHT + 2; -// Rects used for mouseover detection. -// Objects of {x, y, x2, y2, i} where `i` is the index into UNIT_DATA. -let HIT_BOXES = []; -// Index into UNIT_DATA of the last unit hovered over by mouse. -let LAST_HOVER = null; -// Key is unit index, value is {x, y, width, rmeta_x} of the box. -let UNIT_COORDS = {}; -// Map of unit index to the index it was unlocked by. -let REVERSE_UNIT_DEPS = {}; -let REVERSE_UNIT_RMETA_DEPS = {}; -for (let n=0; n unit.duration >= min_time); - - const graph_height = Y_TICK_DIST * units.length; - const {ctx, graph_width, canvas_width, canvas_height, px_per_sec} = draw_graph_axes('pipeline-graph', graph_height); - const container = document.getElementById('pipeline-container'); - container.style.width = canvas_width; - container.style.height = canvas_height; - - // Canvas for hover highlights. This is a separate layer to improve performance. - const linectx = setup_canvas('pipeline-graph-lines', canvas_width, canvas_height); - linectx.clearRect(0, 0, canvas_width, canvas_height); - - // Draw Y tick marks. - for (let n=1; n 1) { - ctx.beginPath(); - ctx.fillStyle = cpuFillStyle; - let bottomLeft = coord(CPU_USAGE[0][0], 0); - ctx.moveTo(bottomLeft.x, bottomLeft.y); - for (let i=0; i < CPU_USAGE.length; i++) { - let [time, usage] = CPU_USAGE[i]; - let {x, y} = coord(time, usage / 100.0 * max_v); - ctx.lineTo(x, y); - } - let bottomRight = coord(CPU_USAGE[CPU_USAGE.length - 1][0], 0); - ctx.lineTo(bottomRight.x, bottomRight.y); - ctx.fill(); - } - - function draw_line(style, key) { - let first = CONCURRENCY_DATA[0]; - let last = coord(first.t, key(first)); - ctx.strokeStyle = style; - ctx.beginPath(); - ctx.moveTo(last.x, last.y); - for (let i=1; i 100) { - throw Error("tick loop too long"); - } - count += 1; - if (max_value <= max_ticks * step) { - break; - } - step += 10; - } - } - const tick_dist = px_per_v * step; - const num_ticks = Math.floor(max_value / step); - return {step, tick_dist, num_ticks}; -} - -function codegen_time(unit) { - if (unit.rmeta_time == null) { - return null; - } - let ctime = unit.duration - unit.rmeta_time; - return [unit.rmeta_time, ctime]; -} - -function roundedRect(ctx, x, y, width, height, r) { - r = Math.min(r, width, height); - ctx.beginPath(); - ctx.moveTo(x+r, y); - ctx.lineTo(x+width-r, y); - ctx.arc(x+width-r, y+r, r, 3*Math.PI/2, 0); - ctx.lineTo(x+width, y+height-r); - ctx.arc(x+width-r, y+height-r, r, 0, Math.PI/2); - ctx.lineTo(x+r, y+height); - ctx.arc(x+r, y+height-r, r, Math.PI/2, Math.PI); - ctx.lineTo(x, y-r); - ctx.arc(x+r, y+r, r, Math.PI, 3*Math.PI/2); - ctx.closePath(); -} - -function pipeline_mouse_hit(event) { - // This brute-force method can be optimized if needed. - for (let box of HIT_BOXES) { - if (event.offsetX >= box.x && event.offsetX <= box.x2 && - event.offsetY >= box.y && event.offsetY <= box.y2) { - return box; - } - } -} - -function pipeline_mousemove(event) { - // Highlight dependency lines on mouse hover. 
- let box = pipeline_mouse_hit(event); - if (box) { - if (box.i != LAST_HOVER) { - LAST_HOVER = box.i; - let g = document.getElementById('pipeline-graph-lines'); - let ctx = g.getContext('2d'); - ctx.clearRect(0, 0, g.width, g.height); - ctx.save(); - ctx.translate(X_LINE, MARGIN); - ctx.lineWidth = 2; - draw_dep_lines(ctx, box.i, true); - - if (box.i in REVERSE_UNIT_DEPS) { - const dep_unit = REVERSE_UNIT_DEPS[box.i]; - if (dep_unit in UNIT_COORDS) { - const {x, y, rmeta_x} = UNIT_COORDS[dep_unit]; - draw_one_dep_line(ctx, x, y, box.i, true); - } - } - if (box.i in REVERSE_UNIT_RMETA_DEPS) { - const dep_unit = REVERSE_UNIT_RMETA_DEPS[box.i]; - if (dep_unit in UNIT_COORDS) { - const {x, y, rmeta_x} = UNIT_COORDS[dep_unit]; - draw_one_dep_line(ctx, rmeta_x, y, box.i, true); - } - } - ctx.restore(); - } - } -} - -render_pipeline_graph(); -render_timing_graph(); - -// Set up and handle controls. -{ - const range = document.getElementById('min-unit-time'); - const time_output = document.getElementById('min-unit-time-output'); - time_output.innerHTML = `${range.value}s`; - range.oninput = event => { - time_output.innerHTML = `${range.value}s`; - render_pipeline_graph(); - }; - - const scale = document.getElementById('scale'); - const scale_output = document.getElementById('scale-output'); - scale_output.innerHTML = `${scale.value}`; - scale.oninput = event => { - scale_output.innerHTML = `${scale.value}`; - render_pipeline_graph(); - render_timing_graph(); - }; -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/timings.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/timings.rs deleted file mode 100644 index 33b46ce16..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/timings.rs +++ /dev/null @@ -1,780 +0,0 @@ -//! Timing tracking. -//! -//! This module implements some simple tracking information for timing of how -//! long it takes for different units to compile. -use super::{CompileMode, Unit}; -use crate::core::compiler::job_queue::JobId; -use crate::core::compiler::BuildContext; -use crate::core::PackageId; -use crate::util::cpu::State; -use crate::util::machine_message::{self, Message}; -use crate::util::{CargoResult, Config}; -use anyhow::Context as _; -use cargo_util::paths; -use std::collections::HashMap; -use std::io::{BufWriter, Write}; -use std::time::{Duration, Instant, SystemTime}; - -pub struct Timings<'cfg> { - config: &'cfg Config, - /// Whether or not timings should be captured. - enabled: bool, - /// If true, saves an HTML report to disk. - report_html: bool, - /// If true, reports unit completion to stderr. - report_info: bool, - /// If true, emits JSON information with timing information. - report_json: bool, - /// When Cargo started. - start: Instant, - /// A rendered string of when compilation started. - start_str: String, - /// A summary of the root units. - /// - /// Tuples of `(package_description, target_descrptions)`. - root_targets: Vec<(String, Vec)>, - /// The build profile. - profile: String, - /// Total number of fresh units. - total_fresh: u32, - /// Total number of dirty units. - total_dirty: u32, - /// Time tracking for each individual unit. - unit_times: Vec, - /// Units that are in the process of being built. - /// When they finished, they are moved to `unit_times`. - active: HashMap, - /// Concurrency-tracking information. This is periodically updated while - /// compilation progresses. 
- concurrency: Vec, - /// Last recorded state of the system's CPUs and when it happened - last_cpu_state: Option, - last_cpu_recording: Instant, - /// Recorded CPU states, stored as tuples. First element is when the - /// recording was taken and second element is percentage usage of the - /// system. - cpu_usage: Vec<(f64, f64)>, -} - -/// Tracking information for an individual unit. -struct UnitTime { - unit: Unit, - /// A string describing the cargo target. - target: String, - /// The time when this unit started as an offset in seconds from `Timings::start`. - start: f64, - /// Total time to build this unit in seconds. - duration: f64, - /// The time when the `.rmeta` file was generated, an offset in seconds - /// from `start`. - rmeta_time: Option, - /// Reverse deps that are freed to run after this unit finished. - unlocked_units: Vec, - /// Same as `unlocked_units`, but unlocked by rmeta. - unlocked_rmeta_units: Vec, -} - -/// Periodic concurrency tracking information. -#[derive(serde::Serialize)] -struct Concurrency { - /// Time as an offset in seconds from `Timings::start`. - t: f64, - /// Number of units currently running. - active: usize, - /// Number of units that could run, but are waiting for a jobserver token. - waiting: usize, - /// Number of units that are not yet ready, because they are waiting for - /// dependencies to finish. - inactive: usize, - /// Number of rustc "extra" threads -- i.e., how many tokens have been - /// provided across all current rustc instances that are not the main thread - /// tokens. - rustc_parallelism: usize, -} - -impl<'cfg> Timings<'cfg> { - pub fn new(bcx: &BuildContext<'_, 'cfg>, root_units: &[Unit]) -> Timings<'cfg> { - let has_report = |what| { - bcx.config - .cli_unstable() - .timings - .as_ref() - .map_or(false, |t| t.iter().any(|opt| opt == what)) - }; - let report_html = has_report("html"); - let report_info = has_report("info"); - let report_json = has_report("json"); - let enabled = report_html | report_info | report_json; - - let mut root_map: HashMap> = HashMap::new(); - for unit in root_units { - let target_desc = unit.target.description_named(); - root_map - .entry(unit.pkg.package_id()) - .or_default() - .push(target_desc); - } - let root_targets = root_map - .into_iter() - .map(|(pkg_id, targets)| { - let pkg_desc = format!("{} {}", pkg_id.name(), pkg_id.version()); - (pkg_desc, targets) - }) - .collect(); - let start_str = humantime::format_rfc3339_seconds(SystemTime::now()).to_string(); - let profile = bcx.build_config.requested_profile.to_string(); - let last_cpu_state = if enabled { - match State::current() { - Ok(state) => Some(state), - Err(e) => { - log::info!("failed to get CPU state, CPU tracking disabled: {:?}", e); - None - } - } - } else { - None - }; - - Timings { - config: bcx.config, - enabled, - report_html, - report_info, - report_json, - start: bcx.config.creation_time(), - start_str, - root_targets, - profile, - total_fresh: 0, - total_dirty: 0, - unit_times: Vec::new(), - active: HashMap::new(), - concurrency: Vec::new(), - last_cpu_state, - last_cpu_recording: Instant::now(), - cpu_usage: Vec::new(), - } - } - - /// Mark that a unit has started running. - pub fn unit_start(&mut self, id: JobId, unit: Unit) { - if !self.enabled { - return; - } - let mut target = if unit.target.is_lib() && unit.mode == CompileMode::Build { - // Special case for brevity, since most dependencies hit - // this path. 
- "".to_string() - } else { - format!(" {}", unit.target.description_named()) - }; - match unit.mode { - CompileMode::Test => target.push_str(" (test)"), - CompileMode::Build => {} - CompileMode::Check { test: true } => target.push_str(" (check-test)"), - CompileMode::Check { test: false } => target.push_str(" (check)"), - CompileMode::Bench => target.push_str(" (bench)"), - CompileMode::Doc { .. } => target.push_str(" (doc)"), - CompileMode::Doctest => target.push_str(" (doc test)"), - CompileMode::Docscrape => target.push_str(" (doc scrape)"), - CompileMode::RunCustomBuild => target.push_str(" (run)"), - } - let unit_time = UnitTime { - unit, - target, - start: self.start.elapsed().as_secs_f64(), - duration: 0.0, - rmeta_time: None, - unlocked_units: Vec::new(), - unlocked_rmeta_units: Vec::new(), - }; - assert!(self.active.insert(id, unit_time).is_none()); - } - - /// Mark that the `.rmeta` file as generated. - pub fn unit_rmeta_finished(&mut self, id: JobId, unlocked: Vec<&Unit>) { - if !self.enabled { - return; - } - // `id` may not always be active. "fresh" units unconditionally - // generate `Message::Finish`, but this active map only tracks dirty - // units. - let unit_time = match self.active.get_mut(&id) { - Some(ut) => ut, - None => return, - }; - let t = self.start.elapsed().as_secs_f64(); - unit_time.rmeta_time = Some(t - unit_time.start); - assert!(unit_time.unlocked_rmeta_units.is_empty()); - unit_time - .unlocked_rmeta_units - .extend(unlocked.iter().cloned().cloned()); - } - - /// Mark that a unit has finished running. - pub fn unit_finished(&mut self, id: JobId, unlocked: Vec<&Unit>) { - if !self.enabled { - return; - } - // See note above in `unit_rmeta_finished`, this may not always be active. - let mut unit_time = match self.active.remove(&id) { - Some(ut) => ut, - None => return, - }; - let t = self.start.elapsed().as_secs_f64(); - unit_time.duration = t - unit_time.start; - assert!(unit_time.unlocked_units.is_empty()); - unit_time - .unlocked_units - .extend(unlocked.iter().cloned().cloned()); - if self.report_info { - let msg = format!( - "{}{} in {:.1}s", - unit_time.name_ver(), - unit_time.target, - unit_time.duration - ); - let _ = self - .config - .shell() - .status_with_color("Completed", msg, termcolor::Color::Cyan); - } - if self.report_json { - let msg = machine_message::TimingInfo { - package_id: unit_time.unit.pkg.package_id(), - target: &unit_time.unit.target, - mode: unit_time.unit.mode, - duration: unit_time.duration, - rmeta_time: unit_time.rmeta_time, - } - .to_json_string(); - crate::drop_println!(self.config, "{}", msg); - } - self.unit_times.push(unit_time); - } - - /// This is called periodically to mark the concurrency of internal structures. - pub fn mark_concurrency( - &mut self, - active: usize, - waiting: usize, - inactive: usize, - rustc_parallelism: usize, - ) { - if !self.enabled { - return; - } - let c = Concurrency { - t: self.start.elapsed().as_secs_f64(), - active, - waiting, - inactive, - rustc_parallelism, - }; - self.concurrency.push(c); - } - - /// Mark that a fresh unit was encountered. - pub fn add_fresh(&mut self) { - self.total_fresh += 1; - } - - /// Mark that a dirty unit was encountered. - pub fn add_dirty(&mut self) { - self.total_dirty += 1; - } - - /// Take a sample of CPU usage - pub fn record_cpu(&mut self) { - if !self.enabled { - return; - } - let prev = match &mut self.last_cpu_state { - Some(state) => state, - None => return, - }; - // Don't take samples too too frequently, even if requested. 
- let now = Instant::now(); - if self.last_cpu_recording.elapsed() < Duration::from_millis(100) { - return; - } - let current = match State::current() { - Ok(s) => s, - Err(e) => { - log::info!("failed to get CPU state: {:?}", e); - return; - } - }; - let pct_idle = current.idle_since(prev); - *prev = current; - self.last_cpu_recording = now; - let dur = now.duration_since(self.start).as_secs_f64(); - self.cpu_usage.push((dur, 100.0 - pct_idle)); - } - - /// Call this when all units are finished. - pub fn finished( - &mut self, - bcx: &BuildContext<'_, '_>, - error: &Option, - ) -> CargoResult<()> { - if !self.enabled { - return Ok(()); - } - self.mark_concurrency(0, 0, 0, 0); - self.unit_times - .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap()); - if self.report_html { - self.report_html(bcx, error) - .with_context(|| "failed to save timing report")?; - } - Ok(()) - } - - /// Save HTML report to disk. - fn report_html( - &self, - bcx: &BuildContext<'_, '_>, - error: &Option, - ) -> CargoResult<()> { - let duration = self.start.elapsed().as_secs_f64(); - let timestamp = self.start_str.replace(&['-', ':'][..], ""); - let filename = format!("cargo-timing-{}.html", timestamp); - let mut f = BufWriter::new(paths::create(&filename)?); - let roots: Vec<&str> = self - .root_targets - .iter() - .map(|(name, _targets)| name.as_str()) - .collect(); - f.write_all(HTML_TMPL.replace("{ROOTS}", &roots.join(", ")).as_bytes())?; - self.write_summary_table(&mut f, duration, bcx, error)?; - f.write_all(HTML_CANVAS.as_bytes())?; - self.write_unit_table(&mut f)?; - // It helps with pixel alignment to use whole numbers. - writeln!( - f, - "\n\ - \n\ - \n\ - ", - include_str!("timings.js") - )?; - drop(f); - let msg = format!( - "report saved to {}", - std::env::current_dir() - .unwrap_or_default() - .join(&filename) - .display() - ); - paths::link_or_copy(&filename, "cargo-timing.html")?; - self.config - .shell() - .status_with_color("Timing", msg, termcolor::Color::Cyan)?; - Ok(()) - } - - /// Render the summary table. - fn write_summary_table( - &self, - f: &mut impl Write, - duration: f64, - bcx: &BuildContext<'_, '_>, - error: &Option, - ) -> CargoResult<()> { - let targets: Vec = self - .root_targets - .iter() - .map(|(name, targets)| format!("{} ({})", name, targets.join(", "))) - .collect(); - let targets = targets.join("
"); - let time_human = if duration > 60.0 { - format!(" ({}m {:.1}s)", duration as u32 / 60, duration % 60.0) - } else { - "".to_string() - }; - let total_time = format!("{:.1}s{}", duration, time_human); - let max_concurrency = self.concurrency.iter().map(|c| c.active).max().unwrap(); - let max_rustc_concurrency = self - .concurrency - .iter() - .map(|c| c.rustc_parallelism) - .max() - .unwrap(); - let rustc_info = render_rustc_info(bcx); - let error_msg = match error { - Some(e) => format!( - r#"\ - - Error:{} - -"#, - e - ), - None => "".to_string(), - }; - write!( - f, - r#" - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -{} -
Targets:{}
Profile:{}
Fresh units:{}
Dirty units:{}
Total units:{}
Max concurrency:{} (jobs={} ncpu={})
Build start:{}
Total time:{}
rustc:{}
Max (global) rustc threads concurrency:{}
-"#, - targets, - self.profile, - self.total_fresh, - self.total_dirty, - self.total_fresh + self.total_dirty, - max_concurrency, - bcx.build_config.jobs, - num_cpus::get(), - self.start_str, - total_time, - rustc_info, - max_rustc_concurrency, - error_msg, - )?; - Ok(()) - } - - fn write_js_data(&self, f: &mut impl Write) -> CargoResult<()> { - // Create a map to link indices of unlocked units. - let unit_map: HashMap = self - .unit_times - .iter() - .enumerate() - .map(|(i, ut)| (ut.unit.clone(), i)) - .collect(); - #[derive(serde::Serialize)] - struct UnitData { - i: usize, - name: String, - version: String, - mode: String, - target: String, - start: f64, - duration: f64, - rmeta_time: Option, - unlocked_units: Vec, - unlocked_rmeta_units: Vec, - } - let round = |x: f64| (x * 100.0).round() / 100.0; - let unit_data: Vec = self - .unit_times - .iter() - .enumerate() - .map(|(i, ut)| { - let mode = if ut.unit.mode.is_run_custom_build() { - "run-custom-build" - } else { - "todo" - } - .to_string(); - - // These filter on the unlocked units because not all unlocked - // units are actually "built". For example, Doctest mode units - // don't actually generate artifacts. - let unlocked_units: Vec = ut - .unlocked_units - .iter() - .filter_map(|unit| unit_map.get(unit).copied()) - .collect(); - let unlocked_rmeta_units: Vec = ut - .unlocked_rmeta_units - .iter() - .filter_map(|unit| unit_map.get(unit).copied()) - .collect(); - UnitData { - i, - name: ut.unit.pkg.name().to_string(), - version: ut.unit.pkg.version().to_string(), - mode, - target: ut.target.clone(), - start: round(ut.start), - duration: round(ut.duration), - rmeta_time: ut.rmeta_time.map(round), - unlocked_units, - unlocked_rmeta_units, - } - }) - .collect(); - writeln!( - f, - "const UNIT_DATA = {};", - serde_json::to_string_pretty(&unit_data)? - )?; - writeln!( - f, - "const CONCURRENCY_DATA = {};", - serde_json::to_string_pretty(&self.concurrency)? - )?; - writeln!( - f, - "const CPU_USAGE = {};", - serde_json::to_string_pretty(&self.cpu_usage)? - )?; - Ok(()) - } - - /// Render the table of all units. - fn write_unit_table(&self, f: &mut impl Write) -> CargoResult<()> { - write!( - f, - r#" - - - - - - - - - - - -"# - )?; - let mut units: Vec<&UnitTime> = self.unit_times.iter().collect(); - units.sort_unstable_by(|a, b| b.duration.partial_cmp(&a.duration).unwrap()); - for (i, unit) in units.iter().enumerate() { - let codegen = match unit.codegen_time() { - None => "".to_string(), - Some((_rt, ctime, cent)) => format!("{:.1}s ({:.0}%)", ctime, cent), - }; - let features = unit.unit.features.join(", "); - write!( - f, - r#" - - - - - - - -"#, - i + 1, - unit.name_ver(), - unit.target, - unit.duration, - codegen, - features, - )?; - } - write!(f, "\n
UnitTotalCodegenFeatures
{}.{}{}{:.1}s{}{}
\n")?; - Ok(()) - } -} - -impl UnitTime { - /// Returns the codegen time as (rmeta_time, codegen_time, percent of total) - fn codegen_time(&self) -> Option<(f64, f64, f64)> { - self.rmeta_time.map(|rmeta_time| { - let ctime = self.duration - rmeta_time; - let cent = (ctime / self.duration) * 100.0; - (rmeta_time, ctime, cent) - }) - } - - fn name_ver(&self) -> String { - format!("{} v{}", self.unit.pkg.name(), self.unit.pkg.version()) - } -} - -fn render_rustc_info(bcx: &BuildContext<'_, '_>) -> String { - let version = bcx - .rustc() - .verbose_version - .lines() - .next() - .expect("rustc version"); - let requested_target = bcx - .build_config - .requested_kinds - .iter() - .map(|kind| bcx.target_data.short_name(kind)) - .collect::>() - .join(", "); - format!( - "{}
Host: {}
Target: {}", - version, - bcx.rustc().host, - requested_target - ) -} - -static HTML_TMPL: &str = r#" - - - Cargo Build Timings โ€” {ROOTS} - - - - - -

Cargo Build Timings

-"#; - -static HTML_CANVAS: &str = r#" - - - - - - - - - - - - - -
-"#; diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/unit.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/unit.rs deleted file mode 100644 index 71b4538c4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/unit.rs +++ /dev/null @@ -1,226 +0,0 @@ -use crate::core::compiler::{CompileKind, CompileMode, CrateType}; -use crate::core::manifest::{Target, TargetKind}; -use crate::core::{profiles::Profile, Package}; -use crate::util::hex::short_hash; -use crate::util::interning::InternedString; -use crate::util::Config; -use std::cell::RefCell; -use std::collections::HashSet; -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::ops::Deref; -use std::rc::Rc; - -/// All information needed to define a unit. -/// -/// A unit is an object that has enough information so that cargo knows how to build it. -/// For example, if your package has dependencies, then every dependency will be built as a library -/// unit. If your package is a library, then it will be built as a library unit as well, or if it -/// is a binary with `main.rs`, then a binary will be output. There are also separate unit types -/// for `test`ing and `check`ing, amongst others. -/// -/// The unit also holds information about all possible metadata about the package in `pkg`. -/// -/// A unit needs to know extra information in addition to the type and root source file. For -/// example, it needs to know the target architecture (OS, chip arch etc.) and it needs to know -/// whether you want a debug or release build. There is enough information in this struct to figure -/// all that out. -#[derive(Clone, PartialOrd, Ord)] -pub struct Unit { - inner: Rc, -} - -/// Internal fields of `Unit` which `Unit` will dereference to. -#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct UnitInner { - /// Information about available targets, which files to include/exclude, etc. Basically stuff in - /// `Cargo.toml`. - pub pkg: Package, - /// Information about the specific target to build, out of the possible targets in `pkg`. Not - /// to be confused with *target-triple* (or *target architecture* ...), the target arch for a - /// build. - pub target: Target, - /// The profile contains information about *how* the build should be run, including debug - /// level, etc. - pub profile: Profile, - /// Whether this compilation unit is for the host or target architecture. - /// - /// For example, when - /// cross compiling and using a custom build script, the build script needs to be compiled for - /// the host architecture so the host rustc can use it (when compiling to the target - /// architecture). - pub kind: CompileKind, - /// The "mode" this unit is being compiled for. See [`CompileMode`] for more details. - pub mode: CompileMode, - /// The `cfg` features to enable for this unit. - /// This must be sorted. - pub features: Vec, - /// Whether this is a standard library unit. - pub is_std: bool, - /// A hash of all dependencies of this unit. - /// - /// This is used to keep the `Unit` unique in the situation where two - /// otherwise identical units need to link to different dependencies. This - /// can happen, for example, when there are shared dependencies that need - /// to be built with different features between normal and build - /// dependencies. See `rebuild_unit_graph_shared` for more on why this is - /// done. 
- /// - /// This value initially starts as 0, and then is filled in via a - /// second-pass after all the unit dependencies have been computed. - pub dep_hash: u64, -} - -impl UnitInner { - /// Returns whether compilation of this unit requires all upstream artifacts - /// to be available. - /// - /// This effectively means that this unit is a synchronization point (if the - /// return value is `true`) that all previously pipelined units need to - /// finish in their entirety before this one is started. - pub fn requires_upstream_objects(&self) -> bool { - self.mode.is_any_test() || self.target.kind().requires_upstream_objects() - } - - /// Returns whether or not this is a "local" package. - /// - /// A "local" package is one that the user can likely edit, or otherwise - /// wants warnings, etc. - pub fn is_local(&self) -> bool { - self.pkg.package_id().source_id().is_path() && !self.is_std - } - - /// Returns whether or not warnings should be displayed for this unit. - pub fn show_warnings(&self, config: &Config) -> bool { - self.is_local() || config.extra_verbose() - } -} - -impl Unit { - pub fn buildkey(&self) -> String { - format!("{}-{}", self.pkg.name(), short_hash(self)) - } -} - -// Just hash the pointer for fast hashing -impl Hash for Unit { - fn hash(&self, hasher: &mut H) { - std::ptr::hash(&*self.inner, hasher) - } -} - -// Just equate the pointer since these are interned -impl PartialEq for Unit { - fn eq(&self, other: &Unit) -> bool { - std::ptr::eq(&*self.inner, &*other.inner) - } -} - -impl Eq for Unit {} - -impl Deref for Unit { - type Target = UnitInner; - - fn deref(&self) -> &UnitInner { - &*self.inner - } -} - -impl fmt::Debug for Unit { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Unit") - .field("pkg", &self.pkg) - .field("target", &self.target) - .field("profile", &self.profile) - .field("kind", &self.kind) - .field("mode", &self.mode) - .field("features", &self.features) - .field("is_std", &self.is_std) - .field("dep_hash", &self.dep_hash) - .finish() - } -} - -/// A small structure used to "intern" `Unit` values. -/// -/// A `Unit` is just a thin pointer to an internal `UnitInner`. This is done to -/// ensure that `Unit` itself is quite small as well as enabling a very -/// efficient hash/equality implementation for `Unit`. All units are -/// manufactured through an interner which guarantees that each equivalent value -/// is only produced once. -pub struct UnitInterner { - state: RefCell, -} - -struct InternerState { - cache: HashSet>, -} - -impl UnitInterner { - /// Creates a new blank interner - pub fn new() -> UnitInterner { - UnitInterner { - state: RefCell::new(InternerState { - cache: HashSet::new(), - }), - } - } - - /// Creates a new `unit` from its components. The returned `Unit`'s fields - /// will all be equivalent to the provided arguments, although they may not - /// be the exact same instance. - pub fn intern( - &self, - pkg: &Package, - target: &Target, - profile: Profile, - kind: CompileKind, - mode: CompileMode, - features: Vec, - is_std: bool, - dep_hash: u64, - ) -> Unit { - let target = match (is_std, target.kind()) { - // This is a horrible hack to support build-std. `libstd` declares - // itself with both rlib and dylib. We don't want the dylib for a - // few reasons: - // - // - dylibs don't have a hash in the filename. If you do something - // (like switch rustc versions), it will stomp on the dylib - // file, invalidating the entire cache (because std is a dep of - // everything). 
- // - We don't want to publicize the presence of dylib for the - // standard library. - // - // At some point in the future, it would be nice to have a - // first-class way of overriding or specifying crate-types. - (true, TargetKind::Lib(crate_types)) if crate_types.contains(&CrateType::Dylib) => { - let mut new_target = Target::clone(target); - new_target.set_kind(TargetKind::Lib(vec![CrateType::Rlib])); - new_target - } - _ => target.clone(), - }; - let inner = self.intern_inner(&UnitInner { - pkg: pkg.clone(), - target, - profile, - kind, - mode, - features, - is_std, - dep_hash, - }); - Unit { inner } - } - - fn intern_inner(&self, item: &UnitInner) -> Rc { - let mut me = self.state.borrow_mut(); - if let Some(item) = me.cache.get(item) { - return item.clone(); - } - let item = Rc::new(item.clone()); - me.cache.insert(item.clone()); - item - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/unit_dependencies.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/unit_dependencies.rs deleted file mode 100644 index c2575fd77..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/unit_dependencies.rs +++ /dev/null @@ -1,870 +0,0 @@ -//! Constructs the dependency graph for compilation. -//! -//! Rust code is typically organized as a set of Cargo packages. The -//! dependencies between the packages themselves are stored in the -//! `Resolve` struct. However, we can't use that information as is for -//! compilation! A package typically contains several targets, or crates, -//! and these targets has inter-dependencies. For example, you need to -//! compile the `lib` target before the `bin` one, and you need to compile -//! `build.rs` before either of those. -//! -//! So, we need to lower the `Resolve`, which specifies dependencies between -//! *packages*, to a graph of dependencies between their *targets*, and this -//! is exactly what this module is doing! Well, almost exactly: another -//! complication is that we might want to compile the same target several times -//! (for example, with and without tests), so we actually build a dependency -//! graph of `Unit`s, which capture these properties. - -use crate::core::compiler::unit_graph::{UnitDep, UnitGraph}; -use crate::core::compiler::UnitInterner; -use crate::core::compiler::{CompileKind, CompileMode, RustcTargetData, Unit}; -use crate::core::dependency::DepKind; -use crate::core::profiles::{Profile, Profiles, UnitFor}; -use crate::core::resolver::features::{FeaturesFor, ResolvedFeatures}; -use crate::core::resolver::Resolve; -use crate::core::{Dependency, Package, PackageId, PackageSet, Target, Workspace}; -use crate::ops::resolve_all_features; -use crate::util::interning::InternedString; -use crate::util::Config; -use crate::CargoResult; -use log::trace; -use std::collections::{HashMap, HashSet}; - -/// Collection of stuff used while creating the `UnitGraph`. -struct State<'a, 'cfg> { - ws: &'a Workspace<'cfg>, - config: &'cfg Config, - unit_dependencies: UnitGraph, - package_set: &'a PackageSet<'cfg>, - usr_resolve: &'a Resolve, - usr_features: &'a ResolvedFeatures, - std_resolve: Option<&'a Resolve>, - std_features: Option<&'a ResolvedFeatures>, - /// This flag is `true` while generating the dependencies for the standard - /// library. 
- is_std: bool, - global_mode: CompileMode, - target_data: &'a RustcTargetData<'cfg>, - profiles: &'a Profiles, - interner: &'a UnitInterner, - scrape_units: &'a [Unit], - - /// A set of edges in `unit_dependencies` where (a, b) means that the - /// dependency from a to b was added purely because it was a dev-dependency. - /// This is used during `connect_run_custom_build_deps`. - dev_dependency_edges: HashSet<(Unit, Unit)>, -} - -pub fn build_unit_dependencies<'a, 'cfg>( - ws: &'a Workspace<'cfg>, - package_set: &'a PackageSet<'cfg>, - resolve: &'a Resolve, - features: &'a ResolvedFeatures, - std_resolve: Option<&'a (Resolve, ResolvedFeatures)>, - roots: &[Unit], - scrape_units: &[Unit], - std_roots: &HashMap>, - global_mode: CompileMode, - target_data: &'a RustcTargetData<'cfg>, - profiles: &'a Profiles, - interner: &'a UnitInterner, -) -> CargoResult { - if roots.is_empty() { - // If -Zbuild-std, don't attach units if there is nothing to build. - // Otherwise, other parts of the code may be confused by seeing units - // in the dep graph without a root. - return Ok(HashMap::new()); - } - let (std_resolve, std_features) = match std_resolve { - Some((r, f)) => (Some(r), Some(f)), - None => (None, None), - }; - let mut state = State { - ws, - config: ws.config(), - unit_dependencies: HashMap::new(), - package_set, - usr_resolve: resolve, - usr_features: features, - std_resolve, - std_features, - is_std: false, - global_mode, - target_data, - profiles, - interner, - scrape_units, - dev_dependency_edges: HashSet::new(), - }; - - let std_unit_deps = calc_deps_of_std(&mut state, std_roots)?; - - deps_of_roots(roots, &mut state)?; - super::links::validate_links(state.resolve(), &state.unit_dependencies)?; - // Hopefully there aren't any links conflicts with the standard library? - - if let Some(std_unit_deps) = std_unit_deps { - attach_std_deps(&mut state, std_roots, std_unit_deps); - } - - connect_run_custom_build_deps(&mut state); - - // Dependencies are used in tons of places throughout the backend, many of - // which affect the determinism of the build itself. As a result be sure - // that dependency lists are always sorted to ensure we've always got a - // deterministic output. - for list in state.unit_dependencies.values_mut() { - list.sort(); - } - trace!("ALL UNIT DEPENDENCIES {:#?}", state.unit_dependencies); - - Ok(state.unit_dependencies) -} - -/// Compute all the dependencies for the standard library. -fn calc_deps_of_std( - mut state: &mut State<'_, '_>, - std_roots: &HashMap>, -) -> CargoResult> { - if std_roots.is_empty() { - return Ok(None); - } - // Compute dependencies for the standard library. - state.is_std = true; - for roots in std_roots.values() { - deps_of_roots(roots, state)?; - } - state.is_std = false; - Ok(Some(std::mem::take(&mut state.unit_dependencies))) -} - -/// Add the standard library units to the `unit_dependencies`. -fn attach_std_deps( - state: &mut State<'_, '_>, - std_roots: &HashMap>, - std_unit_deps: UnitGraph, -) { - // Attach the standard library as a dependency of every target unit. - let mut found = false; - for (unit, deps) in state.unit_dependencies.iter_mut() { - if !unit.kind.is_host() && !unit.mode.is_run_custom_build() { - deps.extend(std_roots[&unit.kind].iter().map(|unit| UnitDep { - unit: unit.clone(), - unit_for: UnitFor::new_normal(), - extern_crate_name: unit.pkg.name(), - // TODO: Does this `public` make sense? 
- public: true, - noprelude: true, - })); - found = true; - } - } - // And also include the dependencies of the standard library itself. Don't - // include these if no units actually needed the standard library. - if found { - for (unit, deps) in std_unit_deps.into_iter() { - if let Some(other_unit) = state.unit_dependencies.insert(unit, deps) { - panic!("std unit collision with existing unit: {:?}", other_unit); - } - } - } -} - -/// Compute all the dependencies of the given root units. -/// The result is stored in state.unit_dependencies. -fn deps_of_roots(roots: &[Unit], state: &mut State<'_, '_>) -> CargoResult<()> { - for unit in roots.iter() { - // Dependencies of tests/benches should not have `panic` set. - // We check the global test mode to see if we are running in `cargo - // test` in which case we ensure all dependencies have `panic` - // cleared, and avoid building the lib thrice (once with `panic`, once - // without, once for `--test`). In particular, the lib included for - // Doc tests and examples are `Build` mode here. - let unit_for = if unit.mode.is_any_test() || state.global_mode.is_rustc_test() { - if unit.target.proc_macro() { - // Special-case for proc-macros, which are forced to for-host - // since they need to link with the proc_macro crate. - UnitFor::new_host_test(state.config) - } else { - UnitFor::new_test(state.config) - } - } else if unit.target.is_custom_build() { - // This normally doesn't happen, except `clean` aggressively - // generates all units. - UnitFor::new_host(false) - } else if unit.target.proc_macro() { - UnitFor::new_host(true) - } else if unit.target.for_host() { - // Plugin should never have panic set. - UnitFor::new_compiler() - } else { - UnitFor::new_normal() - }; - deps_of(unit, state, unit_for)?; - } - - Ok(()) -} - -/// Compute the dependencies of a single unit. -fn deps_of(unit: &Unit, state: &mut State<'_, '_>, unit_for: UnitFor) -> CargoResult<()> { - // Currently the `unit_dependencies` map does not include `unit_for`. This should - // be safe for now. `TestDependency` only exists to clear the `panic` - // flag, and you'll never ask for a `unit` with `panic` set as a - // `TestDependency`. `CustomBuild` should also be fine since if the - // requested unit's settings are the same as `Any`, `CustomBuild` can't - // affect anything else in the hierarchy. - if !state.unit_dependencies.contains_key(unit) { - let unit_deps = compute_deps(unit, state, unit_for)?; - state - .unit_dependencies - .insert(unit.clone(), unit_deps.clone()); - for unit_dep in unit_deps { - deps_of(&unit_dep.unit, state, unit_dep.unit_for)?; - } - } - Ok(()) -} - -/// For a package, returns all targets that are registered as dependencies -/// for that package. -/// This returns a `Vec` of `(Unit, UnitFor)` pairs. The `UnitFor` -/// is the profile type that should be used for dependencies of the unit. -fn compute_deps( - unit: &Unit, - state: &mut State<'_, '_>, - unit_for: UnitFor, -) -> CargoResult> { - if unit.mode.is_run_custom_build() { - return compute_deps_custom_build(unit, unit_for, state); - } else if unit.mode.is_doc() { - // Note: this does not include doc test. - return compute_deps_doc(unit, state, unit_for); - } - - let id = unit.pkg.package_id(); - let filtered_deps = state.deps(unit, unit_for, &|dep| { - // If this target is a build command, then we only want build - // dependencies, otherwise we want everything *other than* build - // dependencies. 
- if unit.target.is_custom_build() != dep.is_build() { - return false; - } - - // If this dependency is **not** a transitive dependency, then it - // only applies to test/example targets. - if !dep.is_transitive() - && !unit.target.is_test() - && !unit.target.is_example() - && !unit.mode.is_doc_scrape() - && !unit.mode.is_any_test() - { - return false; - } - - // If we've gotten past all that, then this dependency is - // actually used! - true - }); - - let mut ret = Vec::new(); - let mut dev_deps = Vec::new(); - for (id, deps) in filtered_deps { - let pkg = state.get(id); - let lib = match pkg.targets().iter().find(|t| t.is_lib()) { - Some(t) => t, - None => continue, - }; - let mode = check_or_build_mode(unit.mode, lib); - let dep_unit_for = unit_for.with_dependency(unit, lib); - - let start = ret.len(); - if state.config.cli_unstable().dual_proc_macros && lib.proc_macro() && !unit.kind.is_host() - { - let unit_dep = new_unit_dep(state, unit, pkg, lib, dep_unit_for, unit.kind, mode)?; - ret.push(unit_dep); - let unit_dep = - new_unit_dep(state, unit, pkg, lib, dep_unit_for, CompileKind::Host, mode)?; - ret.push(unit_dep); - } else { - let unit_dep = new_unit_dep( - state, - unit, - pkg, - lib, - dep_unit_for, - unit.kind.for_target(lib), - mode, - )?; - ret.push(unit_dep); - } - - // If the unit added was a dev-dependency unit, then record that in the - // dev-dependencies array. We'll add this to - // `state.dev_dependency_edges` at the end and process it later in - // `connect_run_custom_build_deps`. - if deps.iter().all(|d| !d.is_transitive()) { - for dep in ret[start..].iter() { - dev_deps.push((unit.clone(), dep.unit.clone())); - } - } - } - state.dev_dependency_edges.extend(dev_deps); - - // If this target is a build script, then what we've collected so far is - // all we need. If this isn't a build script, then it depends on the - // build script if there is one. - if unit.target.is_custom_build() { - return Ok(ret); - } - ret.extend(dep_build_script(unit, unit_for, state)?); - - // If this target is a binary, test, example, etc, then it depends on - // the library of the same package. The call to `resolve.deps` above - // didn't include `pkg` in the return values, so we need to special case - // it here and see if we need to push `(pkg, pkg_lib_target)`. - if unit.target.is_lib() && unit.mode != CompileMode::Doctest { - return Ok(ret); - } - ret.extend(maybe_lib(unit, state, unit_for)?); - - // If any integration tests/benches are being run, make sure that - // binaries are built as well. - if !unit.mode.is_check() - && unit.mode.is_any_test() - && (unit.target.is_test() || unit.target.is_bench()) - { - ret.extend( - unit.pkg - .targets() - .iter() - .filter(|t| { - // Skip binaries with required features that have not been selected. - match t.required_features() { - Some(rf) if t.is_bin() => { - let features = resolve_all_features( - state.resolve(), - state.features(), - state.package_set, - id, - ); - rf.iter().all(|f| features.contains(f)) - } - None if t.is_bin() => true, - _ => false, - } - }) - .map(|t| { - new_unit_dep( - state, - unit, - &unit.pkg, - t, - UnitFor::new_normal(), - unit.kind.for_target(t), - CompileMode::Build, - ) - }) - .collect::>>()?, - ); - } - - Ok(ret) -} - -/// Returns the dependencies needed to run a build script. -/// -/// The `unit` provided must represent an execution of a build script, and -/// the returned set of units must all be run before `unit` is run. 
-fn compute_deps_custom_build( - unit: &Unit, - unit_for: UnitFor, - state: &mut State<'_, '_>, -) -> CargoResult> { - if let Some(links) = unit.pkg.manifest().links() { - if state - .target_data - .script_override(links, unit.kind) - .is_some() - { - // Overridden build scripts don't have any dependencies. - return Ok(Vec::new()); - } - } - // All dependencies of this unit should use profiles for custom builds. - // If this is a build script of a proc macro, make sure it uses host - // features. - let script_unit_for = UnitFor::new_host(unit_for.is_for_host_features()); - // When not overridden, then the dependencies to run a build script are: - // - // 1. Compiling the build script itself. - // 2. For each immediate dependency of our package which has a `links` - // key, the execution of that build script. - // - // We don't have a great way of handling (2) here right now so this is - // deferred until after the graph of all unit dependencies has been - // constructed. - let unit_dep = new_unit_dep( - state, - unit, - &unit.pkg, - &unit.target, - script_unit_for, - // Build scripts always compiled for the host. - CompileKind::Host, - CompileMode::Build, - )?; - Ok(vec![unit_dep]) -} - -/// Returns the dependencies necessary to document a package. -fn compute_deps_doc( - unit: &Unit, - state: &mut State<'_, '_>, - unit_for: UnitFor, -) -> CargoResult> { - let deps = state.deps(unit, unit_for, &|dep| dep.kind() == DepKind::Normal); - - // To document a library, we depend on dependencies actually being - // built. If we're documenting *all* libraries, then we also depend on - // the documentation of the library being built. - let mut ret = Vec::new(); - for (id, _deps) in deps { - let dep = state.get(id); - let lib = match dep.targets().iter().find(|t| t.is_lib()) { - Some(lib) => lib, - None => continue, - }; - // Rustdoc only needs rmeta files for regular dependencies. - // However, for plugins/proc macros, deps should be built like normal. - let mode = check_or_build_mode(unit.mode, lib); - let dep_unit_for = unit_for.with_dependency(unit, lib); - let lib_unit_dep = new_unit_dep( - state, - unit, - dep, - lib, - dep_unit_for, - unit.kind.for_target(lib), - mode, - )?; - ret.push(lib_unit_dep); - if lib.documented() { - if let CompileMode::Doc { deps: true } = unit.mode { - // Document this lib as well. - let doc_unit_dep = new_unit_dep( - state, - unit, - dep, - lib, - dep_unit_for, - unit.kind.for_target(lib), - unit.mode, - )?; - ret.push(doc_unit_dep); - } - } - } - - // Be sure to build/run the build script for documented libraries. - ret.extend(dep_build_script(unit, unit_for, state)?); - - // If we document a binary/example, we need the library available. - if unit.target.is_bin() || unit.target.is_example() { - // build the lib - ret.extend(maybe_lib(unit, state, unit_for)?); - // and also the lib docs for intra-doc links - if let Some(lib) = unit - .pkg - .targets() - .iter() - .find(|t| t.is_linkable() && t.documented()) - { - let dep_unit_for = unit_for.with_dependency(unit, lib); - let lib_doc_unit = new_unit_dep( - state, - unit, - &unit.pkg, - lib, - dep_unit_for, - unit.kind.for_target(lib), - unit.mode, - )?; - ret.push(lib_doc_unit); - } - } - - // Add all units being scraped for examples as a dependency of Doc units. - if state.ws.is_member(&unit.pkg) { - for scrape_unit in state.scrape_units.iter() { - // This needs to match the FeaturesFor used in cargo_compile::generate_targets. 
- let unit_for = UnitFor::new_host(scrape_unit.target.proc_macro()); - deps_of(scrape_unit, state, unit_for)?; - ret.push(new_unit_dep( - state, - scrape_unit, - &scrape_unit.pkg, - &scrape_unit.target, - unit_for, - scrape_unit.kind, - scrape_unit.mode, - )?); - } - } - - Ok(ret) -} - -fn maybe_lib( - unit: &Unit, - state: &mut State<'_, '_>, - unit_for: UnitFor, -) -> CargoResult> { - unit.pkg - .targets() - .iter() - .find(|t| t.is_linkable()) - .map(|t| { - let mode = check_or_build_mode(unit.mode, t); - let dep_unit_for = unit_for.with_dependency(unit, t); - new_unit_dep( - state, - unit, - &unit.pkg, - t, - dep_unit_for, - unit.kind.for_target(t), - mode, - ) - }) - .transpose() -} - -/// If a build script is scheduled to be run for the package specified by -/// `unit`, this function will return the unit to run that build script. -/// -/// Overriding a build script simply means that the running of the build -/// script itself doesn't have any dependencies, so even in that case a unit -/// of work is still returned. `None` is only returned if the package has no -/// build script. -fn dep_build_script( - unit: &Unit, - unit_for: UnitFor, - state: &State<'_, '_>, -) -> CargoResult> { - unit.pkg - .targets() - .iter() - .find(|t| t.is_custom_build()) - .map(|t| { - // The profile stored in the Unit is the profile for the thing - // the custom build script is running for. - let profile = state.profiles.get_profile_run_custom_build(&unit.profile); - // UnitFor::new_host is used because we want the `host` flag set - // for all of our build dependencies (so they all get - // build-override profiles), including compiling the build.rs - // script itself. - // - // If `is_for_host_features` here is `false`, that means we are a - // build.rs script for a normal dependency and we want to set the - // CARGO_FEATURE_* environment variables to the features as a - // normal dep. - // - // If `is_for_host_features` here is `true`, that means that this - // package is being used as a build dependency or proc-macro, and - // so we only want to set CARGO_FEATURE_* variables for the host - // side of the graph. - // - // Keep in mind that the RunCustomBuild unit and the Compile - // build.rs unit use the same features. This is because some - // people use `cfg!` and `#[cfg]` expressions to check for enabled - // features instead of just checking `CARGO_FEATURE_*` at runtime. - // In the case with the new feature resolver (decoupled host - // deps), and a shared dependency has different features enabled - // for normal vs. build, then the build.rs script will get - // compiled twice. I believe it is not feasible to only build it - // once because it would break a large number of scripts (they - // would think they have the wrong set of features enabled). - let script_unit_for = UnitFor::new_host(unit_for.is_for_host_features()); - new_unit_dep_with_profile( - state, - unit, - &unit.pkg, - t, - script_unit_for, - unit.kind, - CompileMode::RunCustomBuild, - profile, - ) - }) - .transpose() -} - -/// Choose the correct mode for dependencies. -fn check_or_build_mode(mode: CompileMode, target: &Target) -> CompileMode { - match mode { - CompileMode::Check { .. } | CompileMode::Doc { .. } | CompileMode::Docscrape => { - if target.for_host() { - // Plugin and proc macro targets should be compiled like - // normal. - CompileMode::Build - } else { - // Regular dependencies should not be checked with --test. - // Regular dependencies of doc targets should emit rmeta only. 
- CompileMode::Check { test: false } - } - } - _ => CompileMode::Build, - } -} - -/// Create a new Unit for a dependency from `parent` to `pkg` and `target`. -fn new_unit_dep( - state: &State<'_, '_>, - parent: &Unit, - pkg: &Package, - target: &Target, - unit_for: UnitFor, - kind: CompileKind, - mode: CompileMode, -) -> CargoResult { - let is_local = pkg.package_id().source_id().is_path() && !state.is_std; - let profile = state.profiles.get_profile( - pkg.package_id(), - state.ws.is_member(pkg), - is_local, - unit_for, - mode, - kind, - ); - new_unit_dep_with_profile(state, parent, pkg, target, unit_for, kind, mode, profile) -} - -fn new_unit_dep_with_profile( - state: &State<'_, '_>, - parent: &Unit, - pkg: &Package, - target: &Target, - unit_for: UnitFor, - kind: CompileKind, - mode: CompileMode, - profile: Profile, -) -> CargoResult { - // TODO: consider making extern_crate_name return InternedString? - let extern_crate_name = InternedString::new(&state.resolve().extern_crate_name( - parent.pkg.package_id(), - pkg.package_id(), - target, - )?); - let public = state - .resolve() - .is_public_dep(parent.pkg.package_id(), pkg.package_id()); - let features_for = unit_for.map_to_features_for(); - let features = state.activated_features(pkg.package_id(), features_for); - let unit = state - .interner - .intern(pkg, target, profile, kind, mode, features, state.is_std, 0); - Ok(UnitDep { - unit, - unit_for, - extern_crate_name, - public, - noprelude: false, - }) -} - -/// Fill in missing dependencies for units of the `RunCustomBuild` -/// -/// As mentioned above in `compute_deps_custom_build` each build script -/// execution has two dependencies. The first is compiling the build script -/// itself (already added) and the second is that all crates the package of the -/// build script depends on with `links` keys, their build script execution. (a -/// bit confusing eh?) -/// -/// Here we take the entire `deps` map and add more dependencies from execution -/// of one build script to execution of another build script. -fn connect_run_custom_build_deps(state: &mut State<'_, '_>) { - let mut new_deps = Vec::new(); - - { - let state = &*state; - // First up build a reverse dependency map. This is a mapping of all - // `RunCustomBuild` known steps to the unit which depends on them. For - // example a library might depend on a build script, so this map will - // have the build script as the key and the library would be in the - // value's set. - let mut reverse_deps_map = HashMap::new(); - for (unit, deps) in state.unit_dependencies.iter() { - for dep in deps { - if dep.unit.mode == CompileMode::RunCustomBuild { - reverse_deps_map - .entry(dep.unit.clone()) - .or_insert_with(HashSet::new) - .insert(unit); - } - } - } - - // Next, we take a look at all build scripts executions listed in the - // dependency map. Our job here is to take everything that depends on - // this build script (from our reverse map above) and look at the other - // package dependencies of these parents. - // - // If we depend on a linkable target and the build script mentions - // `links`, then we depend on that package's build script! Here we use - // `dep_build_script` to manufacture an appropriate build script unit to - // depend on. - for unit in state - .unit_dependencies - .keys() - .filter(|k| k.mode == CompileMode::RunCustomBuild) - { - // This list of dependencies all depend on `unit`, an execution of - // the build script. 
- let reverse_deps = match reverse_deps_map.get(unit) { - Some(set) => set, - None => continue, - }; - - let to_add = reverse_deps - .iter() - // Get all sibling dependencies of `unit` - .flat_map(|reverse_dep| { - state.unit_dependencies[reverse_dep] - .iter() - .map(move |a| (reverse_dep, a)) - }) - // Only deps with `links`. - .filter(|(_parent, other)| { - other.unit.pkg != unit.pkg - && other.unit.target.is_linkable() - && other.unit.pkg.manifest().links().is_some() - }) - // Avoid cycles when using the doc --scrape-examples feature: - // Say a workspace has crates A and B where A has a build-dependency on B. - // The Doc units for A and B will have a dependency on the Docscrape for both A and B. - // So this would add a dependency from B-build to A-build, causing a cycle: - // B (build) -> A (build) -> B(build) - // See the test scrape_examples_avoid_build_script_cycle for a concrete example. - // To avoid this cycle, we filter out the B -> A (docscrape) dependency. - .filter(|(_parent, other)| !other.unit.mode.is_doc_scrape()) - // Skip dependencies induced via dev-dependencies since - // connections between `links` and build scripts only happens - // via normal dependencies. Otherwise since dev-dependencies can - // be cyclic we could have cyclic build-script executions. - .filter_map(move |(parent, other)| { - if state - .dev_dependency_edges - .contains(&((*parent).clone(), other.unit.clone())) - { - None - } else { - Some(other) - } - }) - // Get the RunCustomBuild for other lib. - .filter_map(|other| { - state.unit_dependencies[&other.unit] - .iter() - .find(|other_dep| other_dep.unit.mode == CompileMode::RunCustomBuild) - .cloned() - }) - .collect::>(); - - if !to_add.is_empty() { - // (RunCustomBuild, set(other RunCustomBuild)) - new_deps.push((unit.clone(), to_add)); - } - } - } - - // And finally, add in all the missing dependencies! - for (unit, new_deps) in new_deps { - state - .unit_dependencies - .get_mut(&unit) - .unwrap() - .extend(new_deps); - } -} - -impl<'a, 'cfg> State<'a, 'cfg> { - fn resolve(&self) -> &'a Resolve { - if self.is_std { - self.std_resolve.unwrap() - } else { - self.usr_resolve - } - } - - fn features(&self) -> &'a ResolvedFeatures { - if self.is_std { - self.std_features.unwrap() - } else { - self.usr_features - } - } - - fn activated_features( - &self, - pkg_id: PackageId, - features_for: FeaturesFor, - ) -> Vec { - let features = self.features(); - features.activated_features(pkg_id, features_for) - } - - fn is_dep_activated( - &self, - pkg_id: PackageId, - features_for: FeaturesFor, - dep_name: InternedString, - ) -> bool { - self.features() - .is_dep_activated(pkg_id, features_for, dep_name) - } - - fn get(&self, id: PackageId) -> &'a Package { - self.package_set - .get_one(id) - .unwrap_or_else(|_| panic!("expected {} to be downloaded", id)) - } - - /// Returns a filtered set of dependencies for the given unit. - fn deps( - &self, - unit: &Unit, - unit_for: UnitFor, - filter: &dyn Fn(&Dependency) -> bool, - ) -> Vec<(PackageId, &HashSet)> { - let pkg_id = unit.pkg.package_id(); - let kind = unit.kind; - self.resolve() - .deps(pkg_id) - .filter(|&(_id, deps)| { - assert!(!deps.is_empty()); - deps.iter().any(|dep| { - if !filter(dep) { - return false; - } - // If this dependency is only available for certain platforms, - // make sure we're only enabling it for that platform. 
- if !self.target_data.dep_platform_activated(dep, kind) { - return false; - } - - // If this is an optional dependency, and the new feature resolver - // did not enable it, don't include it. - if dep.is_optional() { - let features_for = unit_for.map_to_features_for(); - if !self.is_dep_activated(pkg_id, features_for, dep.name_in_toml()) { - return false; - } - } - - true - }) - }) - .collect() - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/unit_graph.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/unit_graph.rs deleted file mode 100644 index 1357afb93..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/compiler/unit_graph.rs +++ /dev/null @@ -1,123 +0,0 @@ -use crate::core::compiler::Unit; -use crate::core::compiler::{CompileKind, CompileMode}; -use crate::core::profiles::{Profile, UnitFor}; -use crate::core::{PackageId, Target}; -use crate::util::interning::InternedString; -use crate::util::CargoResult; -use crate::Config; -use std::collections::HashMap; -use std::io::Write; - -/// The dependency graph of Units. -pub type UnitGraph = HashMap>; - -/// A unit dependency. -#[derive(Debug, Clone, Hash, Eq, PartialEq, PartialOrd, Ord)] -pub struct UnitDep { - /// The dependency unit. - pub unit: Unit, - /// The purpose of this dependency (a dependency for a test, or a build - /// script, etc.). Do not use this after the unit graph has been built. - pub unit_for: UnitFor, - /// The name the parent uses to refer to this dependency. - pub extern_crate_name: InternedString, - /// Whether or not this is a public dependency. - pub public: bool, - /// If `true`, the dependency should not be added to Rust's prelude. - pub noprelude: bool, -} - -const VERSION: u32 = 1; - -#[derive(serde::Serialize)] -struct SerializedUnitGraph<'a> { - version: u32, - units: Vec>, - roots: Vec, -} - -#[derive(serde::Serialize)] -struct SerializedUnit<'a> { - pkg_id: PackageId, - target: &'a Target, - profile: &'a Profile, - platform: CompileKind, - mode: CompileMode, - features: &'a Vec, - #[serde(skip_serializing_if = "std::ops::Not::not")] // hide for unstable build-std - is_std: bool, - dependencies: Vec, -} - -#[derive(serde::Serialize)] -struct SerializedUnitDep { - index: usize, - extern_crate_name: InternedString, - // This is only set on nightly since it is unstable. - #[serde(skip_serializing_if = "Option::is_none")] - public: Option, - // This is only set on nightly since it is unstable. - #[serde(skip_serializing_if = "Option::is_none")] - noprelude: Option, - // Intentionally not including `unit_for` because it is a low-level - // internal detail that is mostly used for building the graph. -} - -pub fn emit_serialized_unit_graph( - root_units: &[Unit], - unit_graph: &UnitGraph, - config: &Config, -) -> CargoResult<()> { - let mut units: Vec<(&Unit, &Vec)> = unit_graph.iter().collect(); - units.sort_unstable(); - // Create a map for quick lookup for dependencies. - let indices: HashMap<&Unit, usize> = units - .iter() - .enumerate() - .map(|(i, val)| (val.0, i)) - .collect(); - let roots = root_units.iter().map(|root| indices[root]).collect(); - let ser_units = units - .iter() - .map(|(unit, unit_deps)| { - let dependencies = unit_deps - .iter() - .map(|unit_dep| { - // https://github.com/rust-lang/rust/issues/64260 when stabilized. 
- let (public, noprelude) = if config.nightly_features_allowed { - (Some(unit_dep.public), Some(unit_dep.noprelude)) - } else { - (None, None) - }; - SerializedUnitDep { - index: indices[&unit_dep.unit], - extern_crate_name: unit_dep.extern_crate_name, - public, - noprelude, - } - }) - .collect(); - SerializedUnit { - pkg_id: unit.pkg.package_id(), - target: &unit.target, - profile: &unit.profile, - platform: unit.kind, - mode: unit.mode, - features: &unit.features, - is_std: unit.is_std, - dependencies, - } - }) - .collect(); - let s = SerializedUnitGraph { - version: VERSION, - units: ser_units, - roots, - }; - - let stdout = std::io::stdout(); - let mut lock = stdout.lock(); - serde_json::to_writer(&mut lock, &s)?; - drop(writeln!(lock)); - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/dependency.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/dependency.rs deleted file mode 100644 index 10fae7205..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/dependency.rs +++ /dev/null @@ -1,406 +0,0 @@ -use cargo_platform::Platform; -use log::trace; -use semver::VersionReq; -use serde::ser; -use serde::Serialize; -use std::path::PathBuf; -use std::rc::Rc; - -use crate::core::{PackageId, SourceId, Summary}; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::OptVersionReq; - -/// Information about a dependency requested by a Cargo manifest. -/// Cheap to copy. -#[derive(PartialEq, Eq, Hash, Clone, Debug)] -pub struct Dependency { - inner: Rc, -} - -/// The data underlying a `Dependency`. -#[derive(PartialEq, Eq, Hash, Clone, Debug)] -struct Inner { - name: InternedString, - source_id: SourceId, - /// Source ID for the registry as specified in the manifest. - /// - /// This will be None if it is not specified (crates.io dependency). - /// This is different from `source_id` for example when both a `path` and - /// `registry` is specified. Or in the case of a crates.io dependency, - /// `source_id` will be crates.io and this will be None. - registry_id: Option, - req: OptVersionReq, - specified_req: bool, - kind: DepKind, - only_match_name: bool, - explicit_name_in_toml: Option, - - optional: bool, - public: bool, - default_features: bool, - features: Vec, - - // This dependency should be used only for this platform. - // `None` means *all platforms*. - platform: Option, -} - -#[derive(Serialize)] -struct SerializedDependency<'a> { - name: &'a str, - source: SourceId, - req: String, - kind: DepKind, - rename: Option<&'a str>, - - optional: bool, - uses_default_features: bool, - features: &'a [InternedString], - target: Option<&'a Platform>, - /// The registry URL this dependency is from. - /// If None, then it comes from the default registry (crates.io). - registry: Option<&'a str>, - - /// The file system path for a local path dependency. 
- #[serde(skip_serializing_if = "Option::is_none")] - path: Option, -} - -impl ser::Serialize for Dependency { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - let registry_id = self.registry_id(); - SerializedDependency { - name: &*self.package_name(), - source: self.source_id(), - req: self.version_req().to_string(), - kind: self.kind(), - optional: self.is_optional(), - uses_default_features: self.uses_default_features(), - features: self.features(), - target: self.platform(), - rename: self.explicit_name_in_toml().map(|s| s.as_str()), - registry: registry_id.as_ref().map(|sid| sid.url().as_str()), - path: self.source_id().local_path(), - } - .serialize(s) - } -} - -#[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug, Copy)] -pub enum DepKind { - Normal, - Development, - Build, -} - -impl ser::Serialize for DepKind { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - match *self { - DepKind::Normal => None, - DepKind::Development => Some("dev"), - DepKind::Build => Some("build"), - } - .serialize(s) - } -} - -impl Dependency { - /// Attempt to create a `Dependency` from an entry in the manifest. - pub fn parse( - name: impl Into, - version: Option<&str>, - source_id: SourceId, - ) -> CargoResult { - let name = name.into(); - let (specified_req, version_req) = match version { - Some(v) => match VersionReq::parse(v) { - Ok(req) => (true, OptVersionReq::Req(req)), - Err(err) => { - return Err(anyhow::Error::new(err).context(format!( - "failed to parse the version requirement `{}` for dependency `{}`", - v, name, - ))) - } - }, - None => (false, OptVersionReq::Any), - }; - - let mut ret = Dependency::new_override(name, source_id); - { - let ptr = Rc::make_mut(&mut ret.inner); - ptr.only_match_name = false; - ptr.req = version_req; - ptr.specified_req = specified_req; - } - Ok(ret) - } - - pub fn new_override(name: InternedString, source_id: SourceId) -> Dependency { - assert!(!name.is_empty()); - Dependency { - inner: Rc::new(Inner { - name, - source_id, - registry_id: None, - req: OptVersionReq::Any, - kind: DepKind::Normal, - only_match_name: true, - optional: false, - public: false, - features: Vec::new(), - default_features: true, - specified_req: false, - platform: None, - explicit_name_in_toml: None, - }), - } - } - - pub fn version_req(&self) -> &OptVersionReq { - &self.inner.req - } - - /// This is the name of this `Dependency` as listed in `Cargo.toml`. - /// - /// Or in other words, this is what shows up in the `[dependencies]` section - /// on the left hand side. This is *not* the name of the package that's - /// being depended on as the dependency can be renamed. For that use - /// `package_name` below. - /// - /// Both of the dependencies below return `foo` for `name_in_toml`: - /// - /// ```toml - /// [dependencies] - /// foo = "0.1" - /// ``` - /// - /// and ... - /// - /// ```toml - /// [dependencies] - /// foo = { version = "0.1", package = 'bar' } - /// ``` - pub fn name_in_toml(&self) -> InternedString { - self.explicit_name_in_toml().unwrap_or(self.inner.name) - } - - /// The name of the package that this `Dependency` depends on. - /// - /// Usually this is what's written on the left hand side of a dependencies - /// section, but it can also be renamed via the `package` key. - /// - /// Both of the dependencies below return `foo` for `package_name`: - /// - /// ```toml - /// [dependencies] - /// foo = "0.1" - /// ``` - /// - /// and ... 
- /// - /// ```toml - /// [dependencies] - /// bar = { version = "0.1", package = 'foo' } - /// ``` - pub fn package_name(&self) -> InternedString { - self.inner.name - } - - pub fn source_id(&self) -> SourceId { - self.inner.source_id - } - - pub fn registry_id(&self) -> Option { - self.inner.registry_id - } - - pub fn set_registry_id(&mut self, registry_id: SourceId) -> &mut Dependency { - Rc::make_mut(&mut self.inner).registry_id = Some(registry_id); - self - } - - pub fn kind(&self) -> DepKind { - self.inner.kind - } - - pub fn is_public(&self) -> bool { - self.inner.public - } - - /// Sets whether the dependency is public. - pub fn set_public(&mut self, public: bool) -> &mut Dependency { - if public { - // Setting 'public' only makes sense for normal dependencies - assert_eq!(self.kind(), DepKind::Normal); - } - Rc::make_mut(&mut self.inner).public = public; - self - } - - pub fn specified_req(&self) -> bool { - self.inner.specified_req - } - - /// If none, this dependencies must be built for all platforms. - /// If some, it must only be built for the specified platform. - pub fn platform(&self) -> Option<&Platform> { - self.inner.platform.as_ref() - } - - /// The renamed name of this dependency, if any. - /// - /// If the `package` key is used in `Cargo.toml` then this returns the same - /// value as `name_in_toml`. - pub fn explicit_name_in_toml(&self) -> Option { - self.inner.explicit_name_in_toml - } - - pub fn set_kind(&mut self, kind: DepKind) -> &mut Dependency { - if self.is_public() { - // Setting 'public' only makes sense for normal dependencies - assert_eq!(kind, DepKind::Normal); - } - Rc::make_mut(&mut self.inner).kind = kind; - self - } - - /// Sets the list of features requested for the package. - pub fn set_features( - &mut self, - features: impl IntoIterator>, - ) -> &mut Dependency { - Rc::make_mut(&mut self.inner).features = features.into_iter().map(|s| s.into()).collect(); - self - } - - /// Sets whether the dependency requests default features of the package. - pub fn set_default_features(&mut self, default_features: bool) -> &mut Dependency { - Rc::make_mut(&mut self.inner).default_features = default_features; - self - } - - /// Sets whether the dependency is optional. - pub fn set_optional(&mut self, optional: bool) -> &mut Dependency { - Rc::make_mut(&mut self.inner).optional = optional; - self - } - - /// Sets the source ID for this dependency. - pub fn set_source_id(&mut self, id: SourceId) -> &mut Dependency { - Rc::make_mut(&mut self.inner).source_id = id; - self - } - - /// Sets the version requirement for this dependency. - pub fn set_version_req(&mut self, req: VersionReq) -> &mut Dependency { - Rc::make_mut(&mut self.inner).req = OptVersionReq::Req(req); - self - } - - pub fn set_platform(&mut self, platform: Option) -> &mut Dependency { - Rc::make_mut(&mut self.inner).platform = platform; - self - } - - pub fn set_explicit_name_in_toml( - &mut self, - name: impl Into, - ) -> &mut Dependency { - Rc::make_mut(&mut self.inner).explicit_name_in_toml = Some(name.into()); - self - } - - /// Locks this dependency to depending on the specified package ID. 
- pub fn lock_to(&mut self, id: PackageId) -> &mut Dependency { - assert_eq!(self.inner.source_id, id.source_id()); - trace!( - "locking dep from `{}` with `{}` at {} to {}", - self.package_name(), - self.version_req(), - self.source_id(), - id - ); - let me = Rc::make_mut(&mut self.inner); - me.req.lock_to(id.version()); - - // Only update the `precise` of this source to preserve other - // information about dependency's source which may not otherwise be - // tested during equality/hashing. - me.source_id = me - .source_id - .with_precise(id.source_id().precise().map(|s| s.to_string())); - self - } - - /// Locks this dependency to a specified version. - /// - /// Mainly used in dependency patching like `[patch]` or `[replace]`, which - /// doesn't need to lock the entire dependency to a specific [`PackageId`]. - pub fn lock_version(&mut self, version: &semver::Version) -> &mut Dependency { - let me = Rc::make_mut(&mut self.inner); - me.req.lock_to(version); - self - } - - /// Returns `true` if this is a "locked" dependency. Basically a locked - /// dependency has an exact version req, but not vice versa. - pub fn is_locked(&self) -> bool { - self.inner.req.is_locked() - } - - /// Returns `false` if the dependency is only used to build the local package. - pub fn is_transitive(&self) -> bool { - match self.inner.kind { - DepKind::Normal | DepKind::Build => true, - DepKind::Development => false, - } - } - - pub fn is_build(&self) -> bool { - matches!(self.inner.kind, DepKind::Build) - } - - pub fn is_optional(&self) -> bool { - self.inner.optional - } - - /// Returns `true` if the default features of the dependency are requested. - pub fn uses_default_features(&self) -> bool { - self.inner.default_features - } - /// Returns the list of features that are requested by the dependency. - pub fn features(&self) -> &[InternedString] { - &self.inner.features - } - - /// Returns `true` if the package (`sum`) can fulfill this dependency request. - pub fn matches(&self, sum: &Summary) -> bool { - self.matches_id(sum.package_id()) - } - - /// Returns `true` if the package (`id`) can fulfill this dependency request. - pub fn matches_ignoring_source(&self, id: PackageId) -> bool { - self.package_name() == id.name() && self.version_req().matches(id.version()) - } - - /// Returns `true` if the package (`id`) can fulfill this dependency request. - pub fn matches_id(&self, id: PackageId) -> bool { - self.inner.name == id.name() - && (self.inner.only_match_name - || (self.inner.req.matches(id.version()) && self.inner.source_id == id.source_id())) - } - - pub fn map_source(mut self, to_replace: SourceId, replace_with: SourceId) -> Dependency { - if self.source_id() == to_replace { - self.set_source_id(replace_with); - } - self - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/features.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/features.rs deleted file mode 100644 index 968290a38..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/features.rs +++ /dev/null @@ -1,994 +0,0 @@ -//! Support for nightly features in Cargo itself. -//! -//! This file is the version of `feature_gate.rs` in upstream Rust for Cargo -//! itself and is intended to be the avenue for which new features in Cargo are -//! gated by default and then eventually stabilized. All known stable and -//! unstable features are tracked in this file. -//! -//! If you're reading this then you're likely interested in adding a feature to -//! 
Cargo, and the good news is that it shouldn't be too hard! First determine -//! how the feature should be gated: -//! -//! * New syntax in Cargo.toml should use `cargo-features`. -//! * New CLI options should use `-Z unstable-options`. -//! * New functionality that may not have an interface, or the interface has -//! not yet been designed, or for more complex features that affect multiple -//! parts of Cargo should use a new `-Z` flag. -//! -//! See below for more details. -//! -//! When adding new tests for your feature, usually the tests should go into a -//! new module of the testsuite. See -//! for more information on -//! writing tests. Particularly, check out the "Testing Nightly Features" -//! section for testing unstable features. -//! -//! After you have added your feature, be sure to update the unstable -//! documentation at `src/doc/src/reference/unstable.md` to include a short -//! description of how to use your new feature. -//! -//! And hopefully that's it! -//! -//! ## New Cargo.toml syntax -//! -//! The steps for adding new Cargo.toml syntax are: -//! -//! 1. Add the cargo-features unstable gate. Search below for "look here" to -//! find the `features!` macro and add your feature to the list. -//! -//! 2. Update the Cargo.toml parsing code to handle your new feature. -//! -//! 3. Wherever you added the new parsing code, call -//! `features.require(Feature::my_feature_name())?` if the new syntax is -//! used. This will return an error if the user hasn't listed the feature -//! in `cargo-features` or this is not the nightly channel. -//! -//! ## `-Z unstable-options` -//! -//! `-Z unstable-options` is intended to force the user to opt-in to new CLI -//! flags, options, and new subcommands. -//! -//! The steps to add a new command-line option are: -//! -//! 1. Add the option to the CLI parsing code. In the help text, be sure to -//! include `(unstable)` to note that this is an unstable option. -//! 2. Where the CLI option is loaded, be sure to call -//! [`CliUnstable::fail_if_stable_opt`]. This will return an error if `-Z -//! unstable options` was not passed. -//! -//! ## `-Z` options -//! -//! The steps to add a new `-Z` option are: -//! -//! 1. Add the option to the [`CliUnstable`] struct below. Flags can take an -//! optional value if you want. -//! 2. Update the [`CliUnstable::add`][CliUnstable] function to parse the flag. -//! 3. Wherever the new functionality is implemented, call -//! [`Config::cli_unstable`][crate::util::config::Config::cli_unstable] to -//! get an instance of `CliUnstable` and check if the option has been -//! enabled on the `CliUnstable` instance. Nightly gating is already -//! handled, so no need to worry about that. -//! -//! ## Stabilization -//! -//! For the stabilization process, see -//! . -//! -//! The steps for stabilizing are roughly: -//! -//! 1. Update the feature to be stable, based on the kind of feature: -//! 1. `cargo-features`: Change the feature to `stable` in the `features!` -//! macro below, and include the version and a URL for the documentation. -//! 2. `-Z unstable-options`: Find the call to `fail_if_stable_opt` and -//! remove it. Be sure to update the man pages if necessary. -//! 3. `-Z` flag: Change the parsing code in [`CliUnstable::add`][CliUnstable] -//! to call `stabilized_warn` or `stabilized_err` and remove the field from -//! `CliUnstable. Remove the `(unstable)` note in the clap help text if -//! necessary. -//! 2. Remove `masquerade_as_nightly_cargo` from any tests, and remove -//! 
`cargo-features` from `Cargo.toml` test files if any. -//! 3. Update the docs in unstable.md to move the section to the bottom -//! and summarize it similar to the other entries. Update the rest of the -//! documentation to add the new feature. - -use std::collections::BTreeSet; -use std::env; -use std::fmt::{self, Write}; -use std::str::FromStr; - -use anyhow::{bail, Error}; -use cargo_util::ProcessBuilder; -use serde::{Deserialize, Serialize}; - -use crate::util::errors::CargoResult; -use crate::util::{indented_lines, iter_join}; -use crate::Config; - -pub const HIDDEN: &str = ""; -pub const SEE_CHANNELS: &str = - "See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information \ - about Rust release channels."; - -/// The edition of the compiler (RFC 2052) -#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, Eq, PartialEq, Serialize, Deserialize)] -pub enum Edition { - /// The 2015 edition - Edition2015, - /// The 2018 edition - Edition2018, - /// The 2021 edition - Edition2021, -} - -// Adding a new edition: -// - Add the next edition to the enum. -// - Update every match expression that now fails to compile. -// - Update the `FromStr` impl. -// - Update CLI_VALUES to include the new edition. -// - Set LATEST_UNSTABLE to Some with the new edition. -// - Add an unstable feature to the features! macro below for the new edition. -// - Gate on that new feature in TomlManifest::to_real_manifest. -// - Update the shell completion files. -// - Update any failing tests (hopefully there are very few). -// - Update unstable.md to add a new section for this new edition (see -// https://github.com/rust-lang/cargo/blob/3ebb5f15a940810f250b68821149387af583a79e/src/doc/src/reference/unstable.md?plain=1#L1238-L1264 -// as an example). -// -// Stabilization instructions: -// - Set LATEST_UNSTABLE to None. -// - Set LATEST_STABLE to the new version. -// - Update `is_stable` to `true`. -// - Set the editionNNNN feature to stable in the features macro below. -// - Update any tests that are affected. -// - Update the man page for the --edition flag. -// - Update unstable.md to move the edition section to the bottom. -// - Update the documentation: -// - Update any features impacted by the edition. -// - Update manifest.md#the-edition-field. -// - Update the --edition flag (options-new.md). -// - Rebuild man pages. -impl Edition { - /// The latest edition that is unstable. - /// - /// This is `None` if there is no next unstable edition. - pub const LATEST_UNSTABLE: Option = None; - /// The latest stable edition. - pub const LATEST_STABLE: Edition = Edition::Edition2021; - /// Possible values allowed for the `--edition` CLI flag. - /// - /// This requires a static value due to the way clap works, otherwise I - /// would have built this dynamically. - pub const CLI_VALUES: &'static [&'static str] = &["2015", "2018", "2021"]; - - /// Returns the first version that a particular edition was released on - /// stable. - pub(crate) fn first_version(&self) -> Option { - use Edition::*; - match self { - Edition2015 => None, - Edition2018 => Some(semver::Version::new(1, 31, 0)), - Edition2021 => Some(semver::Version::new(1, 56, 0)), - } - } - - /// Returns `true` if this edition is stable in this release. - pub fn is_stable(&self) -> bool { - use Edition::*; - match self { - Edition2015 => true, - Edition2018 => true, - Edition2021 => true, - } - } - - /// Returns the previous edition from this edition. - /// - /// Returns `None` for 2015. 
- pub fn previous(&self) -> Option { - use Edition::*; - match self { - Edition2015 => None, - Edition2018 => Some(Edition2015), - Edition2021 => Some(Edition2018), - } - } - - /// Returns the next edition from this edition, returning the last edition - /// if this is already the last one. - pub fn saturating_next(&self) -> Edition { - use Edition::*; - match self { - Edition2015 => Edition2018, - Edition2018 => Edition2021, - Edition2021 => Edition2021, - } - } - - /// Updates the given [`ProcessBuilder`] to include the appropriate flags - /// for setting the edition. - pub(crate) fn cmd_edition_arg(&self, cmd: &mut ProcessBuilder) { - if *self != Edition::Edition2015 { - cmd.arg(format!("--edition={}", self)); - } - if !self.is_stable() { - cmd.arg("-Z").arg("unstable-options"); - } - } - - /// Whether or not this edition supports the `rust_*_compatibility` lint. - /// - /// Ideally this would not be necessary, but editions may not have any - /// lints, and thus `rustc` doesn't recognize it. Perhaps `rustc` could - /// create an empty group instead? - pub(crate) fn supports_compat_lint(&self) -> bool { - use Edition::*; - match self { - Edition2015 => false, - Edition2018 => true, - Edition2021 => true, - } - } - - /// Whether or not this edition supports the `rust_*_idioms` lint. - /// - /// Ideally this would not be necessary... - pub(crate) fn supports_idiom_lint(&self) -> bool { - use Edition::*; - match self { - Edition2015 => false, - Edition2018 => true, - Edition2021 => false, - } - } -} - -impl fmt::Display for Edition { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - Edition::Edition2015 => f.write_str("2015"), - Edition::Edition2018 => f.write_str("2018"), - Edition::Edition2021 => f.write_str("2021"), - } - } -} -impl FromStr for Edition { - type Err = Error; - fn from_str(s: &str) -> Result { - match s { - "2015" => Ok(Edition::Edition2015), - "2018" => Ok(Edition::Edition2018), - "2021" => Ok(Edition::Edition2021), - s if s.parse().map_or(false, |y: u16| y > 2021 && y < 2050) => bail!( - "this version of Cargo is older than the `{}` edition, \ - and only supports `2015`, `2018`, and `2021` editions.", - s - ), - s => bail!( - "supported edition values are `2015`, `2018`, or `2021`, \ - but `{}` is unknown", - s - ), - } - } -} - -#[derive(PartialEq)] -enum Status { - Stable, - Unstable, - Removed, -} - -macro_rules! features { - ( - $(($stab:ident, $feature:ident, $version:expr, $docs:expr),)* - ) => ( - #[derive(Default, Clone, Debug)] - pub struct Features { - $($feature: bool,)* - activated: Vec, - nightly_features_allowed: bool, - is_local: bool, - } - - impl Feature { - $( - pub fn $feature() -> &'static Feature { - fn get(features: &Features) -> bool { - stab!($stab) == Status::Stable || features.$feature - } - static FEAT: Feature = Feature { - name: stringify!($feature), - stability: stab!($stab), - version: $version, - docs: $docs, - get, - }; - &FEAT - } - )* - - fn is_enabled(&self, features: &Features) -> bool { - (self.get)(features) - } - } - - impl Features { - fn status(&mut self, feature: &str) -> Option<(&mut bool, &'static Feature)> { - if feature.contains("_") { - return None - } - let feature = feature.replace("-", "_"); - $( - if feature == stringify!($feature) { - return Some((&mut self.$feature, Feature::$feature())) - } - )* - None - } - } - ) -} - -macro_rules! 
stab { - (stable) => { - Status::Stable - }; - (unstable) => { - Status::Unstable - }; - (removed) => { - Status::Removed - }; -} - -// A listing of all features in Cargo. -// -// "look here" -// -// This is the macro that lists all stable and unstable features in Cargo. -// You'll want to add to this macro whenever you add a feature to Cargo, also -// following the directions above. -// -// Note that all feature names here are valid Rust identifiers, but the `_` -// character is translated to `-` when specified in the `cargo-features` -// manifest entry in `Cargo.toml`. -features! { - // A dummy feature that doesn't actually gate anything, but it's used in - // testing to ensure that we can enable stable features. - (stable, test_dummy_stable, "1.0", ""), - - // A dummy feature that gates the usage of the `im-a-teapot` manifest - // entry. This is basically just intended for tests. - (unstable, test_dummy_unstable, "", "reference/unstable.html"), - - // Downloading packages from alternative registry indexes. - (stable, alternative_registries, "1.34", "reference/registries.html"), - - // Using editions - (stable, edition, "1.31", "reference/manifest.html#the-edition-field"), - - // Renaming a package in the manifest via the `package` key - (stable, rename_dependency, "1.31", "reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml"), - - // Whether a lock file is published with this crate - (removed, publish_lockfile, "1.37", "reference/unstable.html#publish-lockfile"), - - // Overriding profiles for dependencies. - (stable, profile_overrides, "1.41", "reference/profiles.html#overrides"), - - // "default-run" manifest option, - (stable, default_run, "1.37", "reference/manifest.html#the-default-run-field"), - - // Declarative build scripts. - (unstable, metabuild, "", "reference/unstable.html#metabuild"), - - // Specifying the 'public' attribute on dependencies - (unstable, public_dependency, "", "reference/unstable.html#public-dependency"), - - // Allow to specify profiles other than 'dev', 'release', 'test', etc. - (stable, named_profiles, "1.57", "reference/profiles.html#custom-profiles"), - - // Opt-in new-resolver behavior. - (stable, resolver, "1.51", "reference/resolver.html#resolver-versions"), - - // Allow to specify whether binaries should be stripped. - (stable, strip, "1.58", "reference/profiles.html#strip-option"), - - // Specifying a minimal 'rust-version' attribute for crates - (stable, rust_version, "1.56", "reference/manifest.html#the-rust-version-field"), - - // Support for 2021 edition. - (stable, edition2021, "1.56", "reference/manifest.html#the-edition-field"), - - // Allow to specify per-package targets (compile kinds) - (unstable, per_package_target, "", "reference/unstable.html#per-package-target"), - - // Allow to specify which codegen backend should be used. 
- (unstable, codegen_backend, "", "reference/unstable.html#codegen-backend"), - - // Allow specifying different binary name apart from the crate name - (unstable, different_binary_name, "", "reference/unstable.html#different-binary-name"), -} - -pub struct Feature { - name: &'static str, - stability: Status, - version: &'static str, - docs: &'static str, - get: fn(&Features) -> bool, -} - -impl Features { - pub fn new( - features: &[String], - config: &Config, - warnings: &mut Vec, - is_local: bool, - ) -> CargoResult { - let mut ret = Features::default(); - ret.nightly_features_allowed = config.nightly_features_allowed; - ret.is_local = is_local; - for feature in features { - ret.add(feature, config, warnings)?; - ret.activated.push(feature.to_string()); - } - Ok(ret) - } - - fn add( - &mut self, - feature_name: &str, - config: &Config, - warnings: &mut Vec, - ) -> CargoResult<()> { - let nightly_features_allowed = self.nightly_features_allowed; - let is_local = self.is_local; - let (slot, feature) = match self.status(feature_name) { - Some(p) => p, - None => bail!("unknown cargo feature `{}`", feature_name), - }; - - if *slot { - bail!( - "the cargo feature `{}` has already been activated", - feature_name - ); - } - - let see_docs = || { - let url_channel = match channel().as_str() { - "dev" | "nightly" => "nightly/", - "beta" => "beta/", - _ => "", - }; - format!( - "See https://doc.rust-lang.org/{}cargo/{} for more information \ - about using this feature.", - url_channel, feature.docs - ) - }; - - match feature.stability { - Status::Stable => { - // The user can't do anything about non-local packages. - // Warnings are usually suppressed, but just being cautious here. - if is_local { - let warning = format!( - "the cargo feature `{}` has been stabilized in the {} \ - release and is no longer necessary to be listed in the \ - manifest\n {}", - feature_name, - feature.version, - see_docs() - ); - warnings.push(warning); - } - } - Status::Unstable if !nightly_features_allowed => bail!( - "the cargo feature `{}` requires a nightly version of \ - Cargo, but this is the `{}` channel\n\ - {}\n{}", - feature_name, - channel(), - SEE_CHANNELS, - see_docs() - ), - Status::Unstable => { - if let Some(allow) = &config.cli_unstable().allow_features { - if !allow.contains(feature_name) { - bail!( - "the feature `{}` is not in the list of allowed features: [{}]", - feature_name, - iter_join(allow, ", "), - ); - } - } - } - Status::Removed => { - let mut msg = format!( - "the cargo feature `{}` has been removed in the {} release\n\n", - feature_name, feature.version - ); - if self.is_local { - drop(writeln!( - msg, - "Remove the feature from Cargo.toml to remove this error." 
- )); - } else { - drop(writeln!( - msg, - "This package cannot be used with this version of Cargo, \ - as the unstable feature `{}` is no longer supported.", - feature_name - )); - } - drop(writeln!(msg, "{}", see_docs())); - bail!(msg); - } - } - - *slot = true; - - Ok(()) - } - - pub fn activated(&self) -> &[String] { - &self.activated - } - - pub fn require(&self, feature: &Feature) -> CargoResult<()> { - if feature.is_enabled(self) { - return Ok(()); - } - let feature_name = feature.name.replace("_", "-"); - let mut msg = format!( - "feature `{}` is required\n\ - \n\ - The package requires the Cargo feature called `{}`, but \ - that feature is not stabilized in this version of Cargo ({}).\n\ - ", - feature_name, - feature_name, - crate::version(), - ); - - if self.nightly_features_allowed { - if self.is_local { - drop(writeln!( - msg, - "Consider adding `cargo-features = [\"{}\"]` \ - to the top of Cargo.toml (above the [package] table) \ - to tell Cargo you are opting in to use this unstable feature.", - feature_name - )); - } else { - drop(writeln!( - msg, - "Consider trying a more recent nightly release." - )); - } - } else { - drop(writeln!( - msg, - "Consider trying a newer version of Cargo \ - (this may require the nightly release)." - )); - } - drop(writeln!( - msg, - "See https://doc.rust-lang.org/nightly/cargo/{} for more information \ - about the status of this feature.", - feature.docs - )); - - bail!("{}", msg); - } - - pub fn is_enabled(&self, feature: &Feature) -> bool { - feature.is_enabled(self) - } -} - -macro_rules! unstable_cli_options { - ( - $( - $(#[$meta:meta])? - $element: ident: $ty: ty = ($help: expr ), - )* - ) => { - /// A parsed representation of all unstable flags that Cargo accepts. - /// - /// Cargo, like `rustc`, accepts a suite of `-Z` flags which are intended for - /// gating unstable functionality to Cargo. These flags are only available on - /// the nightly channel of Cargo. - #[derive(Default, Debug, Deserialize)] - #[serde(default, rename_all = "kebab-case")] - pub struct CliUnstable { - $( - $(#[$meta])? - pub $element: $ty - ),* - } - impl CliUnstable { - pub fn help() -> Vec<(&'static str, &'static str)> { - let fields = vec![$((stringify!($element), $help)),*]; - fields - } - } - } -} - -unstable_cli_options!( - // Permanently unstable features: - allow_features: Option> = ("Allow *only* the listed unstable features"), - print_im_a_teapot: bool= (HIDDEN), - - // All other unstable features. - // Please keep this list lexiographically ordered. 
- advanced_env: bool = (HIDDEN), - avoid_dev_deps: bool = ("Avoid installing dev-dependencies if possible"), - binary_dep_depinfo: bool = ("Track changes to dependency artifacts"), - #[serde(deserialize_with = "deserialize_build_std")] - build_std: Option> = ("Enable Cargo to compile the standard library itself as part of a crate graph compilation"), - build_std_features: Option> = ("Configure features enabled for the standard library itself when building the standard library"), - config_include: bool = ("Enable the `include` key in config files"), - credential_process: bool = ("Add a config setting to fetch registry authentication tokens by calling an external process"), - doctest_in_workspace: bool = ("Compile doctests with paths relative to the workspace root"), - doctest_xcompile: bool = ("Compile and run doctests for non-host target using runner config"), - dual_proc_macros: bool = ("Build proc-macros for both the host and the target"), - features: Option> = (HIDDEN), - jobserver_per_rustc: bool = (HIDDEN), - minimal_versions: bool = ("Resolve minimal dependency versions instead of maximum"), - mtime_on_use: bool = ("Configure Cargo to update the mtime of used files"), - multitarget: bool = ("Allow passing multiple `--target` flags to the cargo subcommand selected"), - namespaced_features: bool = ("Allow features with `dep:` prefix"), - no_index_update: bool = ("Do not update the registry index even if the cache is outdated"), - panic_abort_tests: bool = ("Enable support to run tests with -Cpanic=abort"), - host_config: bool = ("Enable the [host] section in the .cargo/config.toml file"), - target_applies_to_host: bool = ("Enable the `target-applies-to-host` key in the .cargo/config.toml file"), - rustdoc_map: bool = ("Allow passing external documentation mappings to rustdoc"), - separate_nightlies: bool = (HIDDEN), - terminal_width: Option> = ("Provide a terminal width to rustc for error truncation"), - timings: Option> = ("Display concurrency information"), - unstable_options: bool = ("Allow the usage of unstable options"), - weak_dep_features: bool = ("Allow `dep_name?/feature` feature syntax"), - // TODO(wcrichto): move scrape example configuration into Cargo.toml before stabilization - // See: https://github.com/rust-lang/cargo/pull/9525#discussion_r728470927 - rustdoc_scrape_examples: Option = ("Allow rustdoc to scrape examples from reverse-dependencies for documentation"), - skip_rustdoc_fingerprint: bool = (HIDDEN), -); - -const STABILIZED_COMPILE_PROGRESS: &str = "The progress bar is now always \ - enabled when used on an interactive console.\n\ - See https://doc.rust-lang.org/cargo/reference/config.html#termprogresswhen \ - for information on controlling the progress bar."; - -const STABILIZED_OFFLINE: &str = "Offline mode is now available via the \ - --offline CLI option"; - -const STABILIZED_CACHE_MESSAGES: &str = "Message caching is now always enabled."; - -const STABILIZED_INSTALL_UPGRADE: &str = "Packages are now always upgraded if \ - they appear out of date.\n\ - See https://doc.rust-lang.org/cargo/commands/cargo-install.html for more \ - information on how upgrading works."; - -const STABILIZED_CONFIG_PROFILE: &str = "See \ - https://doc.rust-lang.org/cargo/reference/config.html#profile for more \ - information about specifying profiles in config."; - -const STABILIZED_CRATE_VERSIONS: &str = "The crate version is now \ - automatically added to the documentation."; - -const STABILIZED_PACKAGE_FEATURES: &str = "Enhanced feature flag behavior is now \ - available in 
virtual workspaces, and `member/feature-name` syntax is also \ - always available. Other extensions require setting `resolver = \"2\"` in \ - Cargo.toml.\n\ - See https://doc.rust-lang.org/nightly/cargo/reference/features.html#resolver-version-2-command-line-flags \ - for more information."; - -const STABILIZED_FEATURES: &str = "The new feature resolver is now available \ - by specifying `resolver = \"2\"` in Cargo.toml.\n\ - See https://doc.rust-lang.org/nightly/cargo/reference/features.html#feature-resolver-version-2 \ - for more information."; - -const STABILIZED_EXTRA_LINK_ARG: &str = "Additional linker arguments are now \ - supported without passing this flag."; - -const STABILIZED_CONFIGURABLE_ENV: &str = "The [env] section is now always enabled."; - -const STABILIZED_PATCH_IN_CONFIG: &str = "The patch-in-config feature is now always enabled."; - -const STABILIZED_NAMED_PROFILES: &str = "The named-profiles feature is now always enabled.\n\ - See https://doc.rust-lang.org/nightly/cargo/reference/profiles.html#custom-profiles \ - for more information"; - -const STABILIZED_FUTURE_INCOMPAT_REPORT: &str = - "The future-incompat-report feature is now always enabled."; - -fn deserialize_build_std<'de, D>(deserializer: D) -> Result>, D::Error> -where - D: serde::Deserializer<'de>, -{ - let crates = match >>::deserialize(deserializer)? { - Some(list) => list, - None => return Ok(None), - }; - let v = crates.join(","); - Ok(Some( - crate::core::compiler::standard_lib::parse_unstable_flag(Some(&v)), - )) -} - -impl CliUnstable { - pub fn parse( - &mut self, - flags: &[String], - nightly_features_allowed: bool, - ) -> CargoResult> { - if !flags.is_empty() && !nightly_features_allowed { - bail!( - "the `-Z` flag is only accepted on the nightly channel of Cargo, \ - but this is the `{}` channel\n\ - {}", - channel(), - SEE_CHANNELS - ); - } - let mut warnings = Vec::new(); - // We read flags twice, first to get allowed-features (if specified), - // and then to read the remaining unstable flags. - for flag in flags { - if flag.starts_with("allow-features=") { - self.add(flag, &mut warnings)?; - } - } - for flag in flags { - self.add(flag, &mut warnings)?; - } - Ok(warnings) - } - - fn add(&mut self, flag: &str, warnings: &mut Vec) -> CargoResult<()> { - let mut parts = flag.splitn(2, '='); - let k = parts.next().unwrap(); - let v = parts.next(); - - fn parse_bool(key: &str, value: Option<&str>) -> CargoResult { - match value { - None | Some("yes") => Ok(true), - Some("no") => Ok(false), - Some(s) => bail!("flag -Z{} expected `no` or `yes`, found: `{}`", key, s), - } - } - - fn parse_timings(value: Option<&str>) -> Vec { - match value { - None => vec!["html".to_string(), "info".to_string()], - Some(v) => v.split(',').map(|s| s.to_string()).collect(), - } - } - - fn parse_features(value: Option<&str>) -> Vec { - match value { - None => Vec::new(), - Some("") => Vec::new(), - Some(v) => v.split(',').map(|s| s.to_string()).collect(), - } - } - - // Asserts that there is no argument to the flag. 
- fn parse_empty(key: &str, value: Option<&str>) -> CargoResult { - if let Some(v) = value { - bail!("flag -Z{} does not take a value, found: `{}`", key, v); - } - Ok(true) - } - - fn parse_usize_opt(value: Option<&str>) -> CargoResult> { - Ok(match value { - Some(value) => match value.parse::() { - Ok(value) => Some(value), - Err(e) => bail!("expected a number, found: {}", e), - }, - None => None, - }) - } - - let mut stabilized_warn = |key: &str, version: &str, message: &str| { - warnings.push(format!( - "flag `-Z {}` has been stabilized in the {} release, \ - and is no longer necessary\n{}", - key, - version, - indented_lines(message) - )); - }; - - // Use this if the behavior now requires another mechanism to enable. - let stabilized_err = |key: &str, version: &str, message: &str| { - Err(anyhow::format_err!( - "flag `-Z {}` has been stabilized in the {} release\n{}", - key, - version, - indented_lines(message) - )) - }; - - if let Some(allowed) = &self.allow_features { - if k != "allow-features" && !allowed.contains(k) { - bail!( - "the feature `{}` is not in the list of allowed features: [{}]", - k, - iter_join(allowed, ", ") - ); - } - } - - match k { - "print-im-a-teapot" => self.print_im_a_teapot = parse_bool(k, v)?, - "allow-features" => self.allow_features = Some(parse_features(v).into_iter().collect()), - "unstable-options" => self.unstable_options = parse_empty(k, v)?, - "no-index-update" => self.no_index_update = parse_empty(k, v)?, - "avoid-dev-deps" => self.avoid_dev_deps = parse_empty(k, v)?, - "minimal-versions" => self.minimal_versions = parse_empty(k, v)?, - "advanced-env" => self.advanced_env = parse_empty(k, v)?, - "config-include" => self.config_include = parse_empty(k, v)?, - "dual-proc-macros" => self.dual_proc_macros = parse_empty(k, v)?, - // can also be set in .cargo/config or with and ENV - "mtime-on-use" => self.mtime_on_use = parse_empty(k, v)?, - "named-profiles" => stabilized_warn(k, "1.57", STABILIZED_NAMED_PROFILES), - "binary-dep-depinfo" => self.binary_dep_depinfo = parse_empty(k, v)?, - "build-std" => { - self.build_std = Some(crate::core::compiler::standard_lib::parse_unstable_flag(v)) - } - "build-std-features" => self.build_std_features = Some(parse_features(v)), - "timings" => self.timings = Some(parse_timings(v)), - "doctest-xcompile" => self.doctest_xcompile = parse_empty(k, v)?, - "doctest-in-workspace" => self.doctest_in_workspace = parse_empty(k, v)?, - "panic-abort-tests" => self.panic_abort_tests = parse_empty(k, v)?, - "jobserver-per-rustc" => self.jobserver_per_rustc = parse_empty(k, v)?, - "host-config" => self.host_config = parse_empty(k, v)?, - "target-applies-to-host" => self.target_applies_to_host = parse_empty(k, v)?, - "features" => { - // For now this is still allowed (there are still some - // unstable options like "compare"). This should be removed at - // some point, and migrate to a new -Z flag for any future - // things. - let feats = parse_features(v); - let stab_is_not_empty = feats.iter().any(|feat| { - matches!( - feat.as_str(), - "build_dep" | "host_dep" | "dev_dep" | "itarget" | "all" - ) - }); - if stab_is_not_empty || feats.is_empty() { - // Make this stabilized_err once -Zfeature support is removed. 
- stabilized_warn(k, "1.51", STABILIZED_FEATURES); - } - self.features = Some(feats); - } - "separate-nightlies" => self.separate_nightlies = parse_empty(k, v)?, - "multitarget" => self.multitarget = parse_empty(k, v)?, - "rustdoc-map" => self.rustdoc_map = parse_empty(k, v)?, - "terminal-width" => self.terminal_width = Some(parse_usize_opt(v)?), - "namespaced-features" => self.namespaced_features = parse_empty(k, v)?, - "weak-dep-features" => self.weak_dep_features = parse_empty(k, v)?, - "credential-process" => self.credential_process = parse_empty(k, v)?, - "rustdoc-scrape-examples" => { - if let Some(s) = v { - self.rustdoc_scrape_examples = Some(s.to_string()) - } else { - bail!( - r#"-Z rustdoc-scrape-examples must take "all" or "examples" as an argument"# - ) - } - } - "skip-rustdoc-fingerprint" => self.skip_rustdoc_fingerprint = parse_empty(k, v)?, - "compile-progress" => stabilized_warn(k, "1.30", STABILIZED_COMPILE_PROGRESS), - "offline" => stabilized_err(k, "1.36", STABILIZED_OFFLINE)?, - "cache-messages" => stabilized_warn(k, "1.40", STABILIZED_CACHE_MESSAGES), - "install-upgrade" => stabilized_warn(k, "1.41", STABILIZED_INSTALL_UPGRADE), - "config-profile" => stabilized_warn(k, "1.43", STABILIZED_CONFIG_PROFILE), - "crate-versions" => stabilized_warn(k, "1.47", STABILIZED_CRATE_VERSIONS), - "package-features" => stabilized_warn(k, "1.51", STABILIZED_PACKAGE_FEATURES), - "extra-link-arg" => stabilized_warn(k, "1.56", STABILIZED_EXTRA_LINK_ARG), - "configurable-env" => stabilized_warn(k, "1.56", STABILIZED_CONFIGURABLE_ENV), - "patch-in-config" => stabilized_warn(k, "1.56", STABILIZED_PATCH_IN_CONFIG), - "future-incompat-report" => { - stabilized_warn(k, "1.59.0", STABILIZED_FUTURE_INCOMPAT_REPORT) - } - _ => bail!("unknown `-Z` flag specified: {}", k), - } - - Ok(()) - } - - /// Generates an error if `-Z unstable-options` was not used for a new, - /// unstable command-line flag. - pub fn fail_if_stable_opt(&self, flag: &str, issue: u32) -> CargoResult<()> { - if !self.unstable_options { - let see = format!( - "See https://github.com/rust-lang/cargo/issues/{} for more \ - information about the `{}` flag.", - issue, flag - ); - // NOTE: a `config` isn't available here, check the channel directly - let channel = channel(); - if channel == "nightly" || channel == "dev" { - bail!( - "the `{}` flag is unstable, pass `-Z unstable-options` to enable it\n\ - {}", - flag, - see - ); - } else { - bail!( - "the `{}` flag is unstable, and only available on the nightly channel \ - of Cargo, but this is the `{}` channel\n\ - {}\n\ - {}", - flag, - channel, - SEE_CHANNELS, - see - ); - } - } - Ok(()) - } - - /// Generates an error if `-Z unstable-options` was not used for a new, - /// unstable subcommand. - pub fn fail_if_stable_command( - &self, - config: &Config, - command: &str, - issue: u32, - ) -> CargoResult<()> { - if self.unstable_options { - return Ok(()); - } - let see = format!( - "See https://github.com/rust-lang/cargo/issues/{} for more \ - information about the `cargo {}` command.", - issue, command - ); - if config.nightly_features_allowed { - bail!( - "the `cargo {}` command is unstable, pass `-Z unstable-options` to enable it\n\ - {}", - command, - see - ); - } else { - bail!( - "the `cargo {}` command is unstable, and only available on the \ - nightly channel of Cargo, but this is the `{}` channel\n\ - {}\n\ - {}", - command, - channel(), - SEE_CHANNELS, - see - ); - } - } -} - -/// Returns the current release channel ("stable", "beta", "nightly", "dev"). 
-pub fn channel() -> String { - if let Ok(override_channel) = env::var("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS") { - return override_channel; - } - if let Ok(staging) = env::var("RUSTC_BOOTSTRAP") { - if staging == "1" { - return "dev".to_string(); - } - } - crate::version() - .cfg_info - .map(|c| c.release_channel) - .unwrap_or_else(|| String::from("dev")) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/manifest.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/manifest.rs deleted file mode 100644 index b0bc0576a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/manifest.rs +++ /dev/null @@ -1,982 +0,0 @@ -use std::collections::{BTreeMap, HashMap}; -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::path::{Path, PathBuf}; -use std::rc::Rc; -use std::sync::Arc; - -use anyhow::Context as _; -use semver::Version; -use serde::ser; -use serde::Serialize; -use url::Url; - -use crate::core::compiler::{CompileKind, CrateType}; -use crate::core::resolver::ResolveBehavior; -use crate::core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary}; -use crate::core::{Edition, Feature, Features, WorkspaceConfig}; -use crate::util::errors::*; -use crate::util::interning::InternedString; -use crate::util::toml::{TomlManifest, TomlProfiles}; -use crate::util::{short_hash, Config, Filesystem}; - -pub enum EitherManifest { - Real(Manifest), - Virtual(VirtualManifest), -} - -/// Contains all the information about a package, as loaded from a `Cargo.toml`. -/// -/// This is deserialized using the [`TomlManifest`] type. -#[derive(Clone, Debug)] -pub struct Manifest { - summary: Summary, - targets: Vec, - default_kind: Option, - forced_kind: Option, - links: Option, - warnings: Warnings, - exclude: Vec, - include: Vec, - metadata: ManifestMetadata, - custom_metadata: Option, - profiles: Option, - publish: Option>, - replace: Vec<(PackageIdSpec, Dependency)>, - patch: HashMap>, - workspace: WorkspaceConfig, - original: Rc, - unstable_features: Features, - edition: Edition, - rust_version: Option, - im_a_teapot: Option, - default_run: Option, - metabuild: Option>, - resolve_behavior: Option, -} - -/// When parsing `Cargo.toml`, some warnings should silenced -/// if the manifest comes from a dependency. `ManifestWarning` -/// allows this delayed emission of warnings. -#[derive(Clone, Debug)] -pub struct DelayedWarning { - pub message: String, - pub is_critical: bool, -} - -#[derive(Clone, Debug)] -pub struct Warnings(Vec); - -#[derive(Clone, Debug)] -pub struct VirtualManifest { - replace: Vec<(PackageIdSpec, Dependency)>, - patch: HashMap>, - workspace: WorkspaceConfig, - profiles: Option, - warnings: Warnings, - features: Features, - resolve_behavior: Option, -} - -/// General metadata about a package which is just blindly uploaded to the -/// registry. -/// -/// Note that many of these fields can contain invalid values such as the -/// homepage, repository, documentation, or license. These fields are not -/// validated by cargo itself, but rather it is up to the registry when uploaded -/// to validate these fields. Cargo will itself accept any valid TOML -/// specification for these values. 
-#[derive(PartialEq, Clone, Debug)] -pub struct ManifestMetadata { - pub authors: Vec, - pub keywords: Vec, - pub categories: Vec, - pub license: Option, - pub license_file: Option, - pub description: Option, // Not in Markdown - pub readme: Option, // File, not contents - pub homepage: Option, // URL - pub repository: Option, // URL - pub documentation: Option, // URL - pub badges: BTreeMap>, - pub links: Option, -} - -#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub enum TargetKind { - Lib(Vec), - Bin, - Test, - Bench, - ExampleLib(Vec), - ExampleBin, - CustomBuild, -} - -impl ser::Serialize for TargetKind { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - use self::TargetKind::*; - match self { - Lib(kinds) => s.collect_seq(kinds.iter().map(|t| t.to_string())), - Bin => ["bin"].serialize(s), - ExampleBin | ExampleLib(_) => ["example"].serialize(s), - Test => ["test"].serialize(s), - CustomBuild => ["custom-build"].serialize(s), - Bench => ["bench"].serialize(s), - } - } -} - -impl fmt::Debug for TargetKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use self::TargetKind::*; - match *self { - Lib(ref kinds) => kinds.fmt(f), - Bin => "bin".fmt(f), - ExampleBin | ExampleLib(_) => "example".fmt(f), - Test => "test".fmt(f), - CustomBuild => "custom-build".fmt(f), - Bench => "bench".fmt(f), - } - } -} - -impl TargetKind { - pub fn description(&self) -> &'static str { - match self { - TargetKind::Lib(..) => "lib", - TargetKind::Bin => "bin", - TargetKind::Test => "integration-test", - TargetKind::ExampleBin | TargetKind::ExampleLib(..) => "example", - TargetKind::Bench => "bench", - TargetKind::CustomBuild => "build-script", - } - } - - /// Returns whether production of this artifact requires the object files - /// from dependencies to be available. - /// - /// This only returns `false` when all we're producing is an rlib, otherwise - /// it will return `true`. - pub fn requires_upstream_objects(&self) -> bool { - match self { - TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => { - kinds.iter().any(|k| k.requires_upstream_objects()) - } - _ => true, - } - } - - /// Returns the arguments suitable for `--crate-type` to pass to rustc. - pub fn rustc_crate_types(&self) -> Vec { - match self { - TargetKind::Lib(kinds) | TargetKind::ExampleLib(kinds) => kinds.clone(), - TargetKind::CustomBuild - | TargetKind::Bench - | TargetKind::Test - | TargetKind::ExampleBin - | TargetKind::Bin => vec![CrateType::Bin], - } - } -} - -/// Information about a binary, a library, an example, etc. that is part of the -/// package. -#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct Target { - inner: Arc, -} - -#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] -struct TargetInner { - kind: TargetKind, - name: String, - // Note that `bin_name` is used for the cargo-feature `different_binary_name` - bin_name: Option, - // Note that the `src_path` here is excluded from the `Hash` implementation - // as it's absolute currently and is otherwise a little too brittle for - // causing rebuilds. Instead the hash for the path that we send to the - // compiler is handled elsewhere. 
- src_path: TargetSourcePath, - required_features: Option>, - tested: bool, - benched: bool, - doc: bool, - doctest: bool, - harness: bool, // whether to use the test harness (--test) - for_host: bool, - proc_macro: bool, - edition: Edition, -} - -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] -pub enum TargetSourcePath { - Path(PathBuf), - Metabuild, -} - -impl TargetSourcePath { - pub fn path(&self) -> Option<&Path> { - match self { - TargetSourcePath::Path(path) => Some(path.as_ref()), - TargetSourcePath::Metabuild => None, - } - } - - pub fn is_path(&self) -> bool { - matches!(self, TargetSourcePath::Path(_)) - } -} - -impl Hash for TargetSourcePath { - fn hash(&self, _: &mut H) { - // ... - } -} - -impl fmt::Debug for TargetSourcePath { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - TargetSourcePath::Path(path) => path.fmt(f), - TargetSourcePath::Metabuild => "metabuild".fmt(f), - } - } -} - -impl From for TargetSourcePath { - fn from(path: PathBuf) -> Self { - assert!(path.is_absolute(), "`{}` is not absolute", path.display()); - TargetSourcePath::Path(path) - } -} - -#[derive(Serialize)] -struct SerializedTarget<'a> { - /// Is this a `--bin bin`, `--lib`, `--example ex`? - /// Serialized as a list of strings for historical reasons. - kind: &'a TargetKind, - /// Corresponds to `--crate-type` compiler attribute. - /// See - crate_types: Vec, - name: &'a str, - src_path: Option<&'a PathBuf>, - edition: &'a str, - #[serde(rename = "required-features", skip_serializing_if = "Option::is_none")] - required_features: Option>, - /// Whether docs should be built for the target via `cargo doc` - /// See - doc: bool, - doctest: bool, - /// Whether tests should be run for the target (`test` field in `Cargo.toml`) - test: bool, -} - -impl ser::Serialize for Target { - fn serialize(&self, s: S) -> Result { - let src_path = match self.src_path() { - TargetSourcePath::Path(p) => Some(p), - // Unfortunately getting the correct path would require access to - // target_dir, which is not available here. - TargetSourcePath::Metabuild => None, - }; - SerializedTarget { - kind: self.kind(), - crate_types: self.rustc_crate_types(), - name: self.name(), - src_path, - edition: &self.edition().to_string(), - required_features: self - .required_features() - .map(|rf| rf.iter().map(|s| s.as_str()).collect()), - doc: self.documented(), - doctest: self.doctested() && self.doctestable(), - test: self.tested(), - } - .serialize(s) - } -} - -impl fmt::Debug for Target { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.inner.fmt(f) - } -} - -compact_debug! 
{ - impl fmt::Debug for TargetInner { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let (default, default_name) = { - match &self.kind { - TargetKind::Lib(kinds) => { - ( - Target::lib_target( - &self.name, - kinds.clone(), - self.src_path.path().unwrap().to_path_buf(), - self.edition, - ).inner, - format!("lib_target({:?}, {:?}, {:?}, {:?})", - self.name, kinds, self.src_path, self.edition), - ) - } - TargetKind::CustomBuild => { - match self.src_path { - TargetSourcePath::Path(ref path) => { - ( - Target::custom_build_target( - &self.name, - path.to_path_buf(), - self.edition, - ).inner, - format!("custom_build_target({:?}, {:?}, {:?})", - self.name, path, self.edition), - ) - } - TargetSourcePath::Metabuild => { - ( - Target::metabuild_target(&self.name).inner, - format!("metabuild_target({:?})", self.name), - ) - } - } - } - _ => ( - Target::new(self.src_path.clone(), self.edition).inner, - format!("with_path({:?}, {:?})", self.src_path, self.edition), - ), - } - }; - [debug_the_fields( - kind - name - bin_name - src_path - required_features - tested - benched - doc - doctest - harness - for_host - proc_macro - edition - )] - } - } -} - -impl Manifest { - pub fn new( - summary: Summary, - default_kind: Option, - forced_kind: Option, - targets: Vec, - exclude: Vec, - include: Vec, - links: Option, - metadata: ManifestMetadata, - custom_metadata: Option, - profiles: Option, - publish: Option>, - replace: Vec<(PackageIdSpec, Dependency)>, - patch: HashMap>, - workspace: WorkspaceConfig, - unstable_features: Features, - edition: Edition, - rust_version: Option, - im_a_teapot: Option, - default_run: Option, - original: Rc, - metabuild: Option>, - resolve_behavior: Option, - ) -> Manifest { - Manifest { - summary, - default_kind, - forced_kind, - targets, - warnings: Warnings::new(), - exclude, - include, - links, - metadata, - custom_metadata, - profiles, - publish, - replace, - patch, - workspace, - unstable_features, - edition, - rust_version, - original, - im_a_teapot, - default_run, - metabuild, - resolve_behavior, - } - } - - pub fn dependencies(&self) -> &[Dependency] { - self.summary.dependencies() - } - pub fn default_kind(&self) -> Option { - self.default_kind - } - pub fn forced_kind(&self) -> Option { - self.forced_kind - } - pub fn exclude(&self) -> &[String] { - &self.exclude - } - pub fn include(&self) -> &[String] { - &self.include - } - pub fn metadata(&self) -> &ManifestMetadata { - &self.metadata - } - pub fn name(&self) -> InternedString { - self.package_id().name() - } - pub fn package_id(&self) -> PackageId { - self.summary.package_id() - } - pub fn summary(&self) -> &Summary { - &self.summary - } - pub fn summary_mut(&mut self) -> &mut Summary { - &mut self.summary - } - pub fn targets(&self) -> &[Target] { - &self.targets - } - // It is used by cargo-c, please do not remove it - pub fn targets_mut(&mut self) -> &mut [Target] { - &mut self.targets - } - pub fn version(&self) -> &Version { - self.package_id().version() - } - pub fn warnings_mut(&mut self) -> &mut Warnings { - &mut self.warnings - } - pub fn warnings(&self) -> &Warnings { - &self.warnings - } - pub fn profiles(&self) -> Option<&TomlProfiles> { - self.profiles.as_ref() - } - pub fn publish(&self) -> &Option> { - &self.publish - } - pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { - &self.replace - } - pub fn original(&self) -> &TomlManifest { - &self.original - } - pub fn patch(&self) -> &HashMap> { - &self.patch - } - pub fn links(&self) -> Option<&str> { - self.links.as_deref() - } 
- - pub fn workspace_config(&self) -> &WorkspaceConfig { - &self.workspace - } - - /// Unstable, nightly features that are enabled in this manifest. - pub fn unstable_features(&self) -> &Features { - &self.unstable_features - } - - /// The style of resolver behavior to use, declared with the `resolver` field. - /// - /// Returns `None` if it is not specified. - pub fn resolve_behavior(&self) -> Option { - self.resolve_behavior - } - - pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Manifest { - Manifest { - summary: self.summary.map_source(to_replace, replace_with), - ..self - } - } - - pub fn feature_gate(&self) -> CargoResult<()> { - if self.im_a_teapot.is_some() { - self.unstable_features - .require(Feature::test_dummy_unstable()) - .with_context(|| { - "the `im-a-teapot` manifest key is unstable and may \ - not work properly in England" - })?; - } - - if self.default_kind.is_some() || self.forced_kind.is_some() { - self.unstable_features - .require(Feature::per_package_target()) - .with_context(|| { - "the `package.default-target` and `package.forced-target` \ - manifest keys are unstable and may not work properly" - })?; - } - - Ok(()) - } - - // Just a helper function to test out `-Z` flags on Cargo - pub fn print_teapot(&self, config: &Config) { - if let Some(teapot) = self.im_a_teapot { - if config.cli_unstable().print_im_a_teapot { - crate::drop_println!(config, "im-a-teapot = {}", teapot); - } - } - } - - pub fn edition(&self) -> Edition { - self.edition - } - - pub fn rust_version(&self) -> Option<&str> { - self.rust_version.as_deref() - } - - pub fn custom_metadata(&self) -> Option<&toml::Value> { - self.custom_metadata.as_ref() - } - - pub fn default_run(&self) -> Option<&str> { - self.default_run.as_deref() - } - - pub fn metabuild(&self) -> Option<&Vec> { - self.metabuild.as_ref() - } - - pub fn metabuild_path(&self, target_dir: Filesystem) -> PathBuf { - let hash = short_hash(&self.package_id()); - target_dir - .into_path_unlocked() - .join(".metabuild") - .join(format!("metabuild-{}-{}.rs", self.name(), hash)) - } -} - -impl VirtualManifest { - pub fn new( - replace: Vec<(PackageIdSpec, Dependency)>, - patch: HashMap>, - workspace: WorkspaceConfig, - profiles: Option, - features: Features, - resolve_behavior: Option, - ) -> VirtualManifest { - VirtualManifest { - replace, - patch, - workspace, - profiles, - warnings: Warnings::new(), - features, - resolve_behavior, - } - } - - pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { - &self.replace - } - - pub fn patch(&self) -> &HashMap> { - &self.patch - } - - pub fn workspace_config(&self) -> &WorkspaceConfig { - &self.workspace - } - - pub fn profiles(&self) -> Option<&TomlProfiles> { - self.profiles.as_ref() - } - - pub fn warnings_mut(&mut self) -> &mut Warnings { - &mut self.warnings - } - - pub fn warnings(&self) -> &Warnings { - &self.warnings - } - - pub fn unstable_features(&self) -> &Features { - &self.features - } - - /// The style of resolver behavior to use, declared with the `resolver` field. - /// - /// Returns `None` if it is not specified. 
- pub fn resolve_behavior(&self) -> Option { - self.resolve_behavior - } -} - -impl Target { - fn new(src_path: TargetSourcePath, edition: Edition) -> Target { - Target { - inner: Arc::new(TargetInner { - kind: TargetKind::Bin, - name: String::new(), - bin_name: None, - src_path, - required_features: None, - doc: false, - doctest: false, - harness: true, - for_host: false, - proc_macro: false, - edition, - tested: true, - benched: true, - }), - } - } - - fn with_path(src_path: PathBuf, edition: Edition) -> Target { - Target::new(TargetSourcePath::from(src_path), edition) - } - - pub fn lib_target( - name: &str, - crate_targets: Vec, - src_path: PathBuf, - edition: Edition, - ) -> Target { - let mut target = Target::with_path(src_path, edition); - target - .set_kind(TargetKind::Lib(crate_targets)) - .set_name(name) - .set_doctest(true) - .set_doc(true); - target - } - - pub fn bin_target( - name: &str, - bin_name: Option, - src_path: PathBuf, - required_features: Option>, - edition: Edition, - ) -> Target { - let mut target = Target::with_path(src_path, edition); - target - .set_kind(TargetKind::Bin) - .set_name(name) - .set_binary_name(bin_name) - .set_required_features(required_features) - .set_doc(true); - target - } - - /// Builds a `Target` corresponding to the `build = "build.rs"` entry. - pub fn custom_build_target(name: &str, src_path: PathBuf, edition: Edition) -> Target { - let mut target = Target::with_path(src_path, edition); - target - .set_kind(TargetKind::CustomBuild) - .set_name(name) - .set_for_host(true) - .set_benched(false) - .set_tested(false); - target - } - - pub fn metabuild_target(name: &str) -> Target { - let mut target = Target::new(TargetSourcePath::Metabuild, Edition::Edition2018); - target - .set_kind(TargetKind::CustomBuild) - .set_name(name) - .set_for_host(true) - .set_benched(false) - .set_tested(false); - target - } - - pub fn example_target( - name: &str, - crate_targets: Vec, - src_path: PathBuf, - required_features: Option>, - edition: Edition, - ) -> Target { - let kind = if crate_targets.is_empty() || crate_targets.iter().all(|t| *t == CrateType::Bin) - { - TargetKind::ExampleBin - } else { - TargetKind::ExampleLib(crate_targets) - }; - let mut target = Target::with_path(src_path, edition); - target - .set_kind(kind) - .set_name(name) - .set_required_features(required_features) - .set_tested(false) - .set_benched(false); - target - } - - pub fn test_target( - name: &str, - src_path: PathBuf, - required_features: Option>, - edition: Edition, - ) -> Target { - let mut target = Target::with_path(src_path, edition); - target - .set_kind(TargetKind::Test) - .set_name(name) - .set_required_features(required_features) - .set_benched(false); - target - } - - pub fn bench_target( - name: &str, - src_path: PathBuf, - required_features: Option>, - edition: Edition, - ) -> Target { - let mut target = Target::with_path(src_path, edition); - target - .set_kind(TargetKind::Bench) - .set_name(name) - .set_required_features(required_features) - .set_tested(false); - target - } - - pub fn name(&self) -> &str { - &self.inner.name - } - pub fn crate_name(&self) -> String { - self.name().replace("-", "_") - } - pub fn src_path(&self) -> &TargetSourcePath { - &self.inner.src_path - } - pub fn set_src_path(&mut self, src_path: TargetSourcePath) { - Arc::make_mut(&mut self.inner).src_path = src_path; - } - pub fn required_features(&self) -> Option<&Vec> { - self.inner.required_features.as_ref() - } - pub fn kind(&self) -> &TargetKind { - &self.inner.kind - } - pub fn 
tested(&self) -> bool { - self.inner.tested - } - pub fn harness(&self) -> bool { - self.inner.harness - } - pub fn documented(&self) -> bool { - self.inner.doc - } - // A plugin, proc-macro, or build-script. - pub fn for_host(&self) -> bool { - self.inner.for_host - } - pub fn proc_macro(&self) -> bool { - self.inner.proc_macro - } - pub fn edition(&self) -> Edition { - self.inner.edition - } - pub fn benched(&self) -> bool { - self.inner.benched - } - pub fn doctested(&self) -> bool { - self.inner.doctest - } - - pub fn doctestable(&self) -> bool { - match self.kind() { - TargetKind::Lib(ref kinds) => kinds.iter().any(|k| { - *k == CrateType::Rlib || *k == CrateType::Lib || *k == CrateType::ProcMacro - }), - _ => false, - } - } - - pub fn is_lib(&self) -> bool { - matches!(self.kind(), TargetKind::Lib(_)) - } - - pub fn is_dylib(&self) -> bool { - match self.kind() { - TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Dylib), - _ => false, - } - } - - pub fn is_cdylib(&self) -> bool { - match self.kind() { - TargetKind::Lib(libs) => libs.iter().any(|l| *l == CrateType::Cdylib), - _ => false, - } - } - - /// Returns whether this target produces an artifact which can be linked - /// into a Rust crate. - /// - /// This only returns true for certain kinds of libraries. - pub fn is_linkable(&self) -> bool { - match self.kind() { - TargetKind::Lib(kinds) => kinds.iter().any(|k| k.is_linkable()), - _ => false, - } - } - - pub fn is_bin(&self) -> bool { - *self.kind() == TargetKind::Bin - } - - pub fn is_example(&self) -> bool { - matches!( - self.kind(), - TargetKind::ExampleBin | TargetKind::ExampleLib(..) - ) - } - - /// Returns `true` if it is a binary or executable example. - /// NOTE: Tests are `false`! - pub fn is_executable(&self) -> bool { - self.is_bin() || self.is_exe_example() - } - - /// Returns `true` if it is an executable example. - pub fn is_exe_example(&self) -> bool { - // Needed for --all-examples in contexts where only runnable examples make sense - matches!(self.kind(), TargetKind::ExampleBin) - } - - pub fn is_test(&self) -> bool { - *self.kind() == TargetKind::Test - } - pub fn is_bench(&self) -> bool { - *self.kind() == TargetKind::Bench - } - pub fn is_custom_build(&self) -> bool { - *self.kind() == TargetKind::CustomBuild - } - - /// Returns the arguments suitable for `--crate-type` to pass to rustc. 
- pub fn rustc_crate_types(&self) -> Vec { - self.kind().rustc_crate_types() - } - - pub fn set_tested(&mut self, tested: bool) -> &mut Target { - Arc::make_mut(&mut self.inner).tested = tested; - self - } - pub fn set_benched(&mut self, benched: bool) -> &mut Target { - Arc::make_mut(&mut self.inner).benched = benched; - self - } - pub fn set_doctest(&mut self, doctest: bool) -> &mut Target { - Arc::make_mut(&mut self.inner).doctest = doctest; - self - } - pub fn set_for_host(&mut self, for_host: bool) -> &mut Target { - Arc::make_mut(&mut self.inner).for_host = for_host; - self - } - pub fn set_proc_macro(&mut self, proc_macro: bool) -> &mut Target { - Arc::make_mut(&mut self.inner).proc_macro = proc_macro; - self - } - pub fn set_edition(&mut self, edition: Edition) -> &mut Target { - Arc::make_mut(&mut self.inner).edition = edition; - self - } - pub fn set_harness(&mut self, harness: bool) -> &mut Target { - Arc::make_mut(&mut self.inner).harness = harness; - self - } - pub fn set_doc(&mut self, doc: bool) -> &mut Target { - Arc::make_mut(&mut self.inner).doc = doc; - self - } - pub fn set_kind(&mut self, kind: TargetKind) -> &mut Target { - Arc::make_mut(&mut self.inner).kind = kind; - self - } - pub fn set_name(&mut self, name: &str) -> &mut Target { - Arc::make_mut(&mut self.inner).name = name.to_string(); - self - } - pub fn set_binary_name(&mut self, bin_name: Option) -> &mut Target { - Arc::make_mut(&mut self.inner).bin_name = bin_name; - self - } - pub fn set_required_features(&mut self, required_features: Option>) -> &mut Target { - Arc::make_mut(&mut self.inner).required_features = required_features; - self - } - pub fn binary_filename(&self) -> Option { - self.inner.bin_name.clone() - } - pub fn description_named(&self) -> String { - match self.kind() { - TargetKind::Lib(..) => "lib".to_string(), - TargetKind::Bin => format!("bin \"{}\"", self.name()), - TargetKind::Test => format!("test \"{}\"", self.name()), - TargetKind::Bench => format!("bench \"{}\"", self.name()), - TargetKind::ExampleLib(..) | TargetKind::ExampleBin => { - format!("example \"{}\"", self.name()) - } - TargetKind::CustomBuild => "build script".to_string(), - } - } -} - -impl fmt::Display for Target { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.kind() { - TargetKind::Lib(..) => write!(f, "Target(lib)"), - TargetKind::Bin => write!(f, "Target(bin: {})", self.name()), - TargetKind::Test => write!(f, "Target(test: {})", self.name()), - TargetKind::Bench => write!(f, "Target(bench: {})", self.name()), - TargetKind::ExampleBin | TargetKind::ExampleLib(..) 
=> { - write!(f, "Target(example: {})", self.name()) - } - TargetKind::CustomBuild => write!(f, "Target(script)"), - } - } -} - -impl Warnings { - fn new() -> Warnings { - Warnings(Vec::new()) - } - - pub fn add_warning(&mut self, s: String) { - self.0.push(DelayedWarning { - message: s, - is_critical: false, - }) - } - - pub fn add_critical_warning(&mut self, s: String) { - self.0.push(DelayedWarning { - message: s, - is_critical: true, - }) - } - - pub fn warnings(&self) -> &[DelayedWarning] { - &self.0 - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/mod.rs deleted file mode 100644 index aec49b143..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/mod.rs +++ /dev/null @@ -1,28 +0,0 @@ -pub use self::dependency::Dependency; -pub use self::features::{CliUnstable, Edition, Feature, Features}; -pub use self::manifest::{EitherManifest, VirtualManifest}; -pub use self::manifest::{Manifest, Target, TargetKind}; -pub use self::package::{Package, PackageSet}; -pub use self::package_id::PackageId; -pub use self::package_id_spec::PackageIdSpec; -pub use self::registry::Registry; -pub use self::resolver::{Resolve, ResolveVersion}; -pub use self::shell::{Shell, Verbosity}; -pub use self::source::{GitReference, Source, SourceId, SourceMap}; -pub use self::summary::{FeatureMap, FeatureValue, Summary}; -pub use self::workspace::{MaybePackage, Workspace, WorkspaceConfig, WorkspaceRootConfig}; - -pub mod compiler; -pub mod dependency; -pub mod features; -pub mod manifest; -pub mod package; -pub mod package_id; -mod package_id_spec; -pub mod profiles; -pub mod registry; -pub mod resolver; -pub mod shell; -pub mod source; -pub mod summary; -mod workspace; diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/package.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/package.rs deleted file mode 100644 index dd73ea25c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/package.rs +++ /dev/null @@ -1,1186 +0,0 @@ -use std::cell::{Cell, Ref, RefCell, RefMut}; -use std::cmp::Ordering; -use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; -use std::fmt; -use std::hash; -use std::mem; -use std::path::{Path, PathBuf}; -use std::rc::Rc; -use std::time::{Duration, Instant}; - -use anyhow::Context; -use bytesize::ByteSize; -use curl::easy::{Easy, HttpVersion}; -use curl::multi::{EasyHandle, Multi}; -use lazycell::LazyCell; -use log::{debug, warn}; -use semver::Version; -use serde::Serialize; - -use crate::core::compiler::{CompileKind, RustcTargetData}; -use crate::core::dependency::DepKind; -use crate::core::resolver::features::ForceAllTargets; -use crate::core::resolver::{HasDevUnits, Resolve}; -use crate::core::source::MaybePackage; -use crate::core::{Dependency, Manifest, PackageId, SourceId, Target}; -use crate::core::{SourceMap, Summary, Workspace}; -use crate::ops; -use crate::util::config::PackageCacheLock; -use crate::util::errors::{CargoResult, HttpNot200}; -use crate::util::interning::InternedString; -use crate::util::network::Retry; -use crate::util::{self, internal, Config, Progress, ProgressStyle}; - -pub const MANIFEST_PREAMBLE: &str = "\ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# \"normalize\" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. 
-# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. -"; - -/// Information about a package that is available somewhere in the file system. -/// -/// A package is a `Cargo.toml` file plus all the files that are part of it. -// -// TODO: is `manifest_path` a relic? -#[derive(Clone)] -pub struct Package { - inner: Rc, -} - -#[derive(Clone)] -struct PackageInner { - /// The package's manifest. - manifest: Manifest, - /// The root of the package. - manifest_path: PathBuf, -} - -impl Ord for Package { - fn cmp(&self, other: &Package) -> Ordering { - self.package_id().cmp(&other.package_id()) - } -} - -impl PartialOrd for Package { - fn partial_cmp(&self, other: &Package) -> Option { - Some(self.cmp(other)) - } -} - -/// A Package in a form where `Serialize` can be derived. -#[derive(Serialize)] -pub struct SerializedPackage { - name: InternedString, - version: Version, - id: PackageId, - license: Option, - license_file: Option, - description: Option, - source: SourceId, - dependencies: Vec, - targets: Vec, - features: BTreeMap>, - manifest_path: PathBuf, - metadata: Option, - publish: Option>, - authors: Vec, - categories: Vec, - keywords: Vec, - readme: Option, - repository: Option, - homepage: Option, - documentation: Option, - edition: String, - links: Option, - #[serde(skip_serializing_if = "Option::is_none")] - metabuild: Option>, - default_run: Option, - rust_version: Option, -} - -impl Package { - /// Creates a package from a manifest and its location. - pub fn new(manifest: Manifest, manifest_path: &Path) -> Package { - Package { - inner: Rc::new(PackageInner { - manifest, - manifest_path: manifest_path.to_path_buf(), - }), - } - } - - /// Gets the manifest dependencies. - pub fn dependencies(&self) -> &[Dependency] { - self.manifest().dependencies() - } - /// Gets the manifest. - pub fn manifest(&self) -> &Manifest { - &self.inner.manifest - } - /// Gets the manifest. - pub fn manifest_mut(&mut self) -> &mut Manifest { - &mut Rc::make_mut(&mut self.inner).manifest - } - /// Gets the path to the manifest. - pub fn manifest_path(&self) -> &Path { - &self.inner.manifest_path - } - /// Gets the name of the package. - pub fn name(&self) -> InternedString { - self.package_id().name() - } - /// Gets the `PackageId` object for the package (fully defines a package). - pub fn package_id(&self) -> PackageId { - self.manifest().package_id() - } - /// Gets the root folder of the package. - pub fn root(&self) -> &Path { - self.manifest_path().parent().unwrap() - } - /// Gets the summary for the package. - pub fn summary(&self) -> &Summary { - self.manifest().summary() - } - /// Gets the targets specified in the manifest. - pub fn targets(&self) -> &[Target] { - self.manifest().targets() - } - /// Gets the library crate for this package, if it exists. - pub fn library(&self) -> Option<&Target> { - self.targets().iter().find(|t| t.is_lib()) - } - /// Gets the current package version. - pub fn version(&self) -> &Version { - self.package_id().version() - } - /// Gets the package authors. - pub fn authors(&self) -> &Vec { - &self.manifest().metadata().authors - } - - /// Returns `None` if the package is set to publish. - /// Returns `Some(allowed_registries)` if publishing is limited to specified - /// registries or if package is set to not publish. - pub fn publish(&self) -> &Option> { - self.manifest().publish() - } - /// Returns `true` if this package is a proc-macro. 
- pub fn proc_macro(&self) -> bool { - self.targets().iter().any(|target| target.proc_macro()) - } - /// Gets the package's minimum Rust version. - pub fn rust_version(&self) -> Option<&str> { - self.manifest().rust_version() - } - - /// Returns `true` if the package uses a custom build script for any target. - pub fn has_custom_build(&self) -> bool { - self.targets().iter().any(|t| t.is_custom_build()) - } - - pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Package { - Package { - inner: Rc::new(PackageInner { - manifest: self.manifest().clone().map_source(to_replace, replace_with), - manifest_path: self.manifest_path().to_owned(), - }), - } - } - - pub fn to_registry_toml(&self, ws: &Workspace<'_>) -> CargoResult { - let manifest = self - .manifest() - .original() - .prepare_for_publish(ws, self.root())?; - let toml = toml::to_string(&manifest)?; - Ok(format!("{}\n{}", MANIFEST_PREAMBLE, toml)) - } - - /// Returns if package should include `Cargo.lock`. - pub fn include_lockfile(&self) -> bool { - self.targets().iter().any(|t| t.is_example() || t.is_bin()) - } - - pub fn serialized(&self, config: &Config) -> SerializedPackage { - let summary = self.manifest().summary(); - let package_id = summary.package_id(); - let manmeta = self.manifest().metadata(); - // Filter out metabuild targets. They are an internal implementation - // detail that is probably not relevant externally. There's also not a - // real path to show in `src_path`, and this avoids changing the format. - let targets: Vec = self - .manifest() - .targets() - .iter() - .filter(|t| t.src_path().is_path()) - .cloned() - .collect(); - let features = if config.cli_unstable().namespaced_features { - // Convert Vec to Vec - summary - .features() - .iter() - .map(|(k, v)| { - ( - *k, - v.iter() - .map(|fv| InternedString::new(&fv.to_string())) - .collect(), - ) - }) - .collect() - } else { - self.manifest() - .original() - .features() - .cloned() - .unwrap_or_default() - }; - - SerializedPackage { - name: package_id.name(), - version: package_id.version().clone(), - id: package_id, - license: manmeta.license.clone(), - license_file: manmeta.license_file.clone(), - description: manmeta.description.clone(), - source: summary.source_id(), - dependencies: summary.dependencies().to_vec(), - targets, - features, - manifest_path: self.manifest_path().to_path_buf(), - metadata: self.manifest().custom_metadata().cloned(), - authors: manmeta.authors.clone(), - categories: manmeta.categories.clone(), - keywords: manmeta.keywords.clone(), - readme: manmeta.readme.clone(), - repository: manmeta.repository.clone(), - homepage: manmeta.homepage.clone(), - documentation: manmeta.documentation.clone(), - edition: self.manifest().edition().to_string(), - links: self.manifest().links().map(|s| s.to_owned()), - metabuild: self.manifest().metabuild().cloned(), - publish: self.publish().as_ref().cloned(), - default_run: self.manifest().default_run().map(|s| s.to_owned()), - rust_version: self.rust_version().map(|s| s.to_owned()), - } - } -} - -impl fmt::Display for Package { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.summary().package_id()) - } -} - -impl fmt::Debug for Package { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Package") - .field("id", &self.summary().package_id()) - .field("..", &"..") - .finish() - } -} - -impl PartialEq for Package { - fn eq(&self, other: &Package) -> bool { - self.package_id() == other.package_id() - } -} - -impl Eq for 
Package {} - -impl hash::Hash for Package { - fn hash(&self, into: &mut H) { - self.package_id().hash(into) - } -} - -/// A set of packages, with the intent to download. -/// -/// This is primarily used to convert a set of `PackageId`s to `Package`s. It -/// will download as needed, or used the cached download if available. -pub struct PackageSet<'cfg> { - packages: HashMap>, - sources: RefCell>, - config: &'cfg Config, - multi: Multi, - /// Used to prevent reusing the PackageSet to download twice. - downloading: Cell, - /// Whether or not to use curl HTTP/2 multiplexing. - multiplexing: bool, -} - -/// Helper for downloading crates. -pub struct Downloads<'a, 'cfg> { - set: &'a PackageSet<'cfg>, - /// When a download is started, it is added to this map. The key is a - /// "token" (see `Download::token`). It is removed once the download is - /// finished. - pending: HashMap, EasyHandle)>, - /// Set of packages currently being downloaded. This should stay in sync - /// with `pending`. - pending_ids: HashSet, - /// The final result of each download. A pair `(token, result)`. This is a - /// temporary holding area, needed because curl can report multiple - /// downloads at once, but the main loop (`wait`) is written to only - /// handle one at a time. - results: Vec<(usize, Result<(), curl::Error>)>, - /// The next ID to use for creating a token (see `Download::token`). - next: usize, - /// Progress bar. - progress: RefCell>>, - /// Number of downloads that have successfully finished. - downloads_finished: usize, - /// Total bytes for all successfully downloaded packages. - downloaded_bytes: u64, - /// Size (in bytes) and package name of the largest downloaded package. - largest: (u64, String), - /// Time when downloading started. - start: Instant, - /// Indicates *all* downloads were successful. - success: bool, - - /// Timeout management, both of timeout thresholds as well as whether or not - /// our connection has timed out (and accompanying message if it has). - /// - /// Note that timeout management is done manually here instead of in libcurl - /// because we want to apply timeouts to an entire batch of operations, not - /// any one particular single operation. - timeout: ops::HttpTimeout, - /// Last time bytes were received. - updated_at: Cell, - /// This is a slow-speed check. It is reset to `now + timeout_duration` - /// every time at least `threshold` bytes are received. If the current - /// time ever exceeds `next_speed_check`, then give up and report a - /// timeout error. - next_speed_check: Cell, - /// This is the slow-speed threshold byte count. It starts at the - /// configured threshold value (default 10), and is decremented by the - /// number of bytes received in each chunk. If it is <= zero, the - /// threshold has been met and data is being received fast enough not to - /// trigger a timeout; reset `next_speed_check` and set this back to the - /// configured threshold. - next_speed_check_bytes_threshold: Cell, - /// Global filesystem lock to ensure only one Cargo is downloading at a - /// time. - _lock: PackageCacheLock<'cfg>, -} - -struct Download<'cfg> { - /// The token for this download, used as the key of the `Downloads::pending` map - /// and stored in `EasyHandle` as well. - token: usize, - - /// The package that we're downloading. - id: PackageId, - - /// Actual downloaded data, updated throughout the lifetime of this download. - data: RefCell>, - - /// The URL that we're downloading from, cached here for error messages and - /// reenqueuing. 
-    url: String,
-
-    /// A descriptive string to print when we've finished downloading this crate.
-    descriptor: String,
-
-    /// Statistics updated from the progress callback in libcurl.
-    total: Cell<u64>,
-    current: Cell<u64>,
-
-    /// The moment we started this transfer at.
-    start: Instant,
-    timed_out: Cell<Option<String>>,
-
-    /// Logic used to track retrying this download if it's a spurious failure.
-    retry: Retry<'cfg>,
-}
-
-impl<'cfg> PackageSet<'cfg> {
-    pub fn new(
-        package_ids: &[PackageId],
-        sources: SourceMap<'cfg>,
-        config: &'cfg Config,
-    ) -> CargoResult<PackageSet<'cfg>> {
-        // We've enabled the `http2` feature of `curl` in Cargo, so treat
-        // failures here as fatal as it would indicate a build-time problem.
-        //
-        // Note that the multiplexing support is pretty new so we're having it
-        // off-by-default temporarily.
-        //
-        // Also note that pipelining is disabled as curl authors have indicated
-        // that it's buggy, and we've empirically seen that it's buggy with HTTP
-        // proxies.
-        let mut multi = Multi::new();
-        let multiplexing = config.http_config()?.multiplexing.unwrap_or(true);
-        multi
-            .pipelining(false, multiplexing)
-            .with_context(|| "failed to enable multiplexing/pipelining in curl")?;
-
-        // let's not flood crates.io with connections
-        multi.set_max_host_connections(2)?;
-
-        Ok(PackageSet {
-            packages: package_ids
-                .iter()
-                .map(|&id| (id, LazyCell::new()))
-                .collect(),
-            sources: RefCell::new(sources),
-            config,
-            multi,
-            downloading: Cell::new(false),
-            multiplexing,
-        })
-    }
-
-    pub fn package_ids(&self) -> impl Iterator<Item = PackageId> + '_ {
-        self.packages.keys().cloned()
-    }
-
-    pub fn packages(&self) -> impl Iterator<Item = &Package> {
-        self.packages.values().filter_map(|p| p.borrow())
-    }
-
-    pub fn enable_download<'a>(&'a self) -> CargoResult<Downloads<'a, 'cfg>> {
-        assert!(!self.downloading.replace(true));
-        let timeout = ops::HttpTimeout::new(self.config)?;
-        Ok(Downloads {
-            start: Instant::now(),
-            set: self,
-            next: 0,
-            pending: HashMap::new(),
-            pending_ids: HashSet::new(),
-            results: Vec::new(),
-            progress: RefCell::new(Some(Progress::with_style(
-                "Downloading",
-                ProgressStyle::Ratio,
-                self.config,
-            ))),
-            downloads_finished: 0,
-            downloaded_bytes: 0,
-            largest: (0, String::new()),
-            success: false,
-            updated_at: Cell::new(Instant::now()),
-            timeout,
-            next_speed_check: Cell::new(Instant::now()),
-            next_speed_check_bytes_threshold: Cell::new(0),
-            _lock: self.config.acquire_package_cache_lock()?,
-        })
-    }
-
-    pub fn get_one(&self, id: PackageId) -> CargoResult<&Package> {
-        if let Some(pkg) = self.packages.get(&id).and_then(|slot| slot.borrow()) {
-            return Ok(pkg);
-        }
-        Ok(self.get_many(Some(id))?.remove(0))
-    }
-
-    pub fn get_many(&self, ids: impl IntoIterator<Item = PackageId>) -> CargoResult<Vec<&Package>> {
-        let mut pkgs = Vec::new();
-        let mut downloads = self.enable_download()?;
-        for id in ids {
-            pkgs.extend(downloads.start(id)?);
-        }
-        while downloads.remaining() > 0 {
-            pkgs.push(downloads.wait()?);
-        }
-        downloads.success = true;
-        Ok(pkgs)
-    }
-
-    /// Downloads any packages accessible from the give root ids.
- pub fn download_accessible( - &self, - resolve: &Resolve, - root_ids: &[PackageId], - has_dev_units: HasDevUnits, - requested_kinds: &[CompileKind], - target_data: &RustcTargetData<'cfg>, - force_all_targets: ForceAllTargets, - ) -> CargoResult<()> { - fn collect_used_deps( - used: &mut BTreeSet, - resolve: &Resolve, - pkg_id: PackageId, - has_dev_units: HasDevUnits, - requested_kinds: &[CompileKind], - target_data: &RustcTargetData<'_>, - force_all_targets: ForceAllTargets, - ) -> CargoResult<()> { - if !used.insert(pkg_id) { - return Ok(()); - } - let filtered_deps = PackageSet::filter_deps( - pkg_id, - resolve, - has_dev_units, - requested_kinds, - target_data, - force_all_targets, - ); - for pkg_id in filtered_deps { - collect_used_deps( - used, - resolve, - pkg_id, - has_dev_units, - requested_kinds, - target_data, - force_all_targets, - )?; - } - Ok(()) - } - - // This is sorted by PackageId to get consistent behavior and error - // messages for Cargo's testsuite. Perhaps there is a better ordering - // that optimizes download time? - let mut to_download = BTreeSet::new(); - - for id in root_ids { - collect_used_deps( - &mut to_download, - resolve, - *id, - has_dev_units, - requested_kinds, - target_data, - force_all_targets, - )?; - } - self.get_many(to_download.into_iter())?; - Ok(()) - } - - /// Check if there are any dependency packages that do not have any libs. - pub(crate) fn no_lib_pkgs( - &self, - resolve: &Resolve, - root_ids: &[PackageId], - has_dev_units: HasDevUnits, - requested_kinds: &[CompileKind], - target_data: &RustcTargetData<'_>, - force_all_targets: ForceAllTargets, - ) -> BTreeMap> { - root_ids - .iter() - .map(|&root_id| { - let pkgs = PackageSet::filter_deps( - root_id, - resolve, - has_dev_units, - requested_kinds, - target_data, - force_all_targets, - ) - .filter_map(|package_id| { - if let Ok(dep_pkg) = self.get_one(package_id) { - if !dep_pkg.targets().iter().any(|t| t.is_lib()) { - Some(dep_pkg) - } else { - None - } - } else { - None - } - }) - .collect(); - (root_id, pkgs) - }) - .collect() - } - - fn filter_deps<'a>( - pkg_id: PackageId, - resolve: &'a Resolve, - has_dev_units: HasDevUnits, - requested_kinds: &'a [CompileKind], - target_data: &'a RustcTargetData<'_>, - force_all_targets: ForceAllTargets, - ) -> impl Iterator + 'a { - resolve - .deps(pkg_id) - .filter(move |&(_id, deps)| { - deps.iter().any(|dep| { - if dep.kind() == DepKind::Development && has_dev_units == HasDevUnits::No { - return false; - } - if force_all_targets == ForceAllTargets::No { - let activated = requested_kinds - .iter() - .chain(Some(&CompileKind::Host)) - .any(|kind| target_data.dep_platform_activated(dep, *kind)); - if !activated { - return false; - } - } - true - }) - }) - .map(|(pkg_id, _)| pkg_id) - .into_iter() - } - - pub fn sources(&self) -> Ref<'_, SourceMap<'cfg>> { - self.sources.borrow() - } - - pub fn sources_mut(&self) -> RefMut<'_, SourceMap<'cfg>> { - self.sources.borrow_mut() - } - - /// Merge the given set into self. - pub fn add_set(&mut self, set: PackageSet<'cfg>) { - assert!(!self.downloading.get()); - assert!(!set.downloading.get()); - for (pkg_id, p_cell) in set.packages { - self.packages.entry(pkg_id).or_insert(p_cell); - } - let mut sources = self.sources.borrow_mut(); - let other_sources = set.sources.into_inner(); - sources.add_source_map(other_sources); - } -} - -// When dynamically linked against libcurl, we want to ignore some failures -// when using old versions that don't support certain features. -macro_rules! 
try_old_curl { - ($e:expr, $msg:expr) => { - let result = $e; - if cfg!(target_os = "macos") { - if let Err(e) = result { - warn!("ignoring libcurl {} error: {}", $msg, e); - } - } else { - result.with_context(|| { - anyhow::format_err!("failed to enable {}, is curl not built right?", $msg) - })?; - } - }; -} - -impl<'a, 'cfg> Downloads<'a, 'cfg> { - /// Starts to download the package for the `id` specified. - /// - /// Returns `None` if the package is queued up for download and will - /// eventually be returned from `wait_for_download`. Returns `Some(pkg)` if - /// the package is ready and doesn't need to be downloaded. - pub fn start(&mut self, id: PackageId) -> CargoResult> { - self.start_inner(id) - .with_context(|| format!("failed to download `{}`", id)) - } - - fn start_inner(&mut self, id: PackageId) -> CargoResult> { - // First up see if we've already cached this package, in which case - // there's nothing to do. - let slot = self - .set - .packages - .get(&id) - .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?; - if let Some(pkg) = slot.borrow() { - return Ok(Some(pkg)); - } - - // Ask the original source fo this `PackageId` for the corresponding - // package. That may immediately come back and tell us that the package - // is ready, or it could tell us that it needs to be downloaded. - let mut sources = self.set.sources.borrow_mut(); - let source = sources - .get_mut(id.source_id()) - .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?; - let pkg = source - .download(id) - .with_context(|| "unable to get packages from source")?; - let (url, descriptor) = match pkg { - MaybePackage::Ready(pkg) => { - debug!("{} doesn't need a download", id); - assert!(slot.fill(pkg).is_ok()); - return Ok(Some(slot.borrow().unwrap())); - } - MaybePackage::Download { url, descriptor } => (url, descriptor), - }; - - // Ok we're going to download this crate, so let's set up all our - // internal state and hand off an `Easy` handle to our libcurl `Multi` - // handle. This won't actually start the transfer, but later it'll - // happen during `wait_for_download` - let token = self.next; - self.next += 1; - debug!("downloading {} as {}", id, token); - assert!(self.pending_ids.insert(id)); - - let (mut handle, _timeout) = ops::http_handle_and_timeout(self.set.config)?; - handle.get(true)?; - handle.url(&url)?; - handle.follow_location(true)?; // follow redirects - - // Enable HTTP/2 to be used as it'll allow true multiplexing which makes - // downloads much faster. - // - // Currently Cargo requests the `http2` feature of the `curl` crate - // which means it should always be built in. On OSX, however, we ship - // cargo still linked against the system libcurl. Building curl with - // ALPN support for HTTP/2 requires newer versions of OSX (the - // SecureTransport API) than we want to ship Cargo for. By linking Cargo - // against the system libcurl then older curl installations won't use - // HTTP/2 but newer ones will. All that to basically say we ignore - // errors here on OSX, but consider this a fatal error to not activate - // HTTP/2 on all other platforms. - if self.set.multiplexing { - try_old_curl!(handle.http_version(HttpVersion::V2), "HTTP2"); - } else { - handle.http_version(HttpVersion::V11)?; - } - - // This is an option to `libcurl` which indicates that if there's a - // bunch of parallel requests to the same host they all wait until the - // pipelining status of the host is known. 
This means that we won't - // initiate dozens of connections to crates.io, but rather only one. - // Once the main one is opened we realized that pipelining is possible - // and multiplexing is possible with static.crates.io. All in all this - // reduces the number of connections done to a more manageable state. - try_old_curl!(handle.pipewait(true), "pipewait"); - - handle.write_function(move |buf| { - debug!("{} - {} bytes of data", token, buf.len()); - tls::with(|downloads| { - if let Some(downloads) = downloads { - downloads.pending[&token] - .0 - .data - .borrow_mut() - .extend_from_slice(buf); - } - }); - Ok(buf.len()) - })?; - - handle.progress(true)?; - handle.progress_function(move |dl_total, dl_cur, _, _| { - tls::with(|downloads| match downloads { - Some(d) => d.progress(token, dl_total as u64, dl_cur as u64), - None => false, - }) - })?; - - // If the progress bar isn't enabled then it may be awhile before the - // first crate finishes downloading so we inform immediately that we're - // downloading crates here. - if self.downloads_finished == 0 - && self.pending.is_empty() - && !self.progress.borrow().as_ref().unwrap().is_enabled() - { - self.set - .config - .shell() - .status("Downloading", "crates ...")?; - } - - let dl = Download { - token, - data: RefCell::new(Vec::new()), - id, - url, - descriptor, - total: Cell::new(0), - current: Cell::new(0), - start: Instant::now(), - timed_out: Cell::new(None), - retry: Retry::new(self.set.config)?, - }; - self.enqueue(dl, handle)?; - self.tick(WhyTick::DownloadStarted)?; - - Ok(None) - } - - /// Returns the number of crates that are still downloading. - pub fn remaining(&self) -> usize { - self.pending.len() - } - - /// Blocks the current thread waiting for a package to finish downloading. - /// - /// This method will wait for a previously enqueued package to finish - /// downloading and return a reference to it after it's done downloading. - /// - /// # Panics - /// - /// This function will panic if there are no remaining downloads. - pub fn wait(&mut self) -> CargoResult<&'a Package> { - let (dl, data) = loop { - assert_eq!(self.pending.len(), self.pending_ids.len()); - let (token, result) = self.wait_for_curl()?; - debug!("{} finished with {:?}", token, result); - - let (mut dl, handle) = self - .pending - .remove(&token) - .expect("got a token for a non-in-progress transfer"); - let data = mem::take(&mut *dl.data.borrow_mut()); - let mut handle = self.set.multi.remove(handle)?; - self.pending_ids.remove(&dl.id); - - // Check if this was a spurious error. If it was a spurious error - // then we want to re-enqueue our request for another attempt and - // then we wait for another request to finish. - let ret = { - let timed_out = &dl.timed_out; - let url = &dl.url; - dl.retry - .r#try(|| { - if let Err(e) = result { - // If this error is "aborted by callback" then that's - // probably because our progress callback aborted due to - // a timeout. We'll find out by looking at the - // `timed_out` field, looking for a descriptive message. - // If one is found we switch the error code (to ensure - // it's flagged as spurious) and then attach our extra - // information to the error. 
- if !e.is_aborted_by_callback() { - return Err(e.into()); - } - - return Err(match timed_out.replace(None) { - Some(msg) => { - let code = curl_sys::CURLE_OPERATION_TIMEDOUT; - let mut err = curl::Error::new(code); - err.set_extra(msg); - err - } - None => e, - } - .into()); - } - - let code = handle.response_code()?; - if code != 200 && code != 0 { - let url = handle.effective_url()?.unwrap_or(url); - return Err(HttpNot200 { - code, - url: url.to_string(), - } - .into()); - } - Ok(()) - }) - .with_context(|| format!("failed to download from `{}`", dl.url))? - }; - match ret { - Some(()) => break (dl, data), - None => { - self.pending_ids.insert(dl.id); - self.enqueue(dl, handle)? - } - } - }; - - // If the progress bar isn't enabled then we still want to provide some - // semblance of progress of how we're downloading crates, and if the - // progress bar is enabled this provides a good log of what's happening. - self.progress.borrow_mut().as_mut().unwrap().clear(); - self.set - .config - .shell() - .status("Downloaded", &dl.descriptor)?; - - self.downloads_finished += 1; - self.downloaded_bytes += dl.total.get(); - if dl.total.get() > self.largest.0 { - self.largest = (dl.total.get(), dl.id.name().to_string()); - } - - // We're about to synchronously extract the crate below. While we're - // doing that our download progress won't actually be updated, nor do we - // have a great view into the progress of the extraction. Let's prepare - // the user for this CPU-heavy step if it looks like it'll take some - // time to do so. - if dl.total.get() < ByteSize::kb(400).0 { - self.tick(WhyTick::DownloadFinished)?; - } else { - self.tick(WhyTick::Extracting(&dl.id.name()))?; - } - - // Inform the original source that the download is finished which - // should allow us to actually get the package and fill it in now. - let mut sources = self.set.sources.borrow_mut(); - let source = sources - .get_mut(dl.id.source_id()) - .ok_or_else(|| internal(format!("couldn't find source for `{}`", dl.id)))?; - let start = Instant::now(); - let pkg = source.finish_download(dl.id, data)?; - - // Assume that no time has passed while we were calling - // `finish_download`, update all speed checks and timeout limits of all - // active downloads to make sure they don't fire because of a slowly - // extracted tarball. - let finish_dur = start.elapsed(); - self.updated_at.set(self.updated_at.get() + finish_dur); - self.next_speed_check - .set(self.next_speed_check.get() + finish_dur); - - let slot = &self.set.packages[&dl.id]; - assert!(slot.fill(pkg).is_ok()); - Ok(slot.borrow().unwrap()) - } - - fn enqueue(&mut self, dl: Download<'cfg>, handle: Easy) -> CargoResult<()> { - let mut handle = self.set.multi.add(handle)?; - let now = Instant::now(); - handle.set_token(dl.token)?; - self.updated_at.set(now); - self.next_speed_check.set(now + self.timeout.dur); - self.next_speed_check_bytes_threshold - .set(u64::from(self.timeout.low_speed_limit)); - dl.timed_out.set(None); - dl.current.set(0); - dl.total.set(0); - self.pending.insert(dl.token, (dl, handle)); - Ok(()) - } - - /// Block, waiting for curl. Returns a token and a `Result` for that token - /// (`Ok` means the download successfully finished). - fn wait_for_curl(&mut self) -> CargoResult<(usize, Result<(), curl::Error>)> { - // This is the main workhorse loop. We use libcurl's portable `wait` - // method to actually perform blocking. This isn't necessarily too - // efficient in terms of fd management, but we should only be juggling - // a few anyway. 
- // - // Here we start off by asking the `multi` handle to do some work via - // the `perform` method. This will actually do I/O work (non-blocking) - // and attempt to make progress. Afterwards we ask about the `messages` - // contained in the handle which will inform us if anything has finished - // transferring. - // - // If we've got a finished transfer after all that work we break out - // and process the finished transfer at the end. Otherwise we need to - // actually block waiting for I/O to happen, which we achieve with the - // `wait` method on `multi`. - loop { - let n = tls::set(self, || { - self.set - .multi - .perform() - .with_context(|| "failed to perform http requests") - })?; - debug!("handles remaining: {}", n); - let results = &mut self.results; - let pending = &self.pending; - self.set.multi.messages(|msg| { - let token = msg.token().expect("failed to read token"); - let handle = &pending[&token].1; - if let Some(result) = msg.result_for(handle) { - results.push((token, result)); - } else { - debug!("message without a result (?)"); - } - }); - - if let Some(pair) = results.pop() { - break Ok(pair); - } - assert!(!self.pending.is_empty()); - let timeout = self - .set - .multi - .get_timeout()? - .unwrap_or_else(|| Duration::new(5, 0)); - self.set - .multi - .wait(&mut [], timeout) - .with_context(|| "failed to wait on curl `Multi`")?; - } - } - - fn progress(&self, token: usize, total: u64, cur: u64) -> bool { - let dl = &self.pending[&token].0; - dl.total.set(total); - let now = Instant::now(); - if cur > dl.current.get() { - let delta = cur - dl.current.get(); - let threshold = self.next_speed_check_bytes_threshold.get(); - - dl.current.set(cur); - self.updated_at.set(now); - - if delta >= threshold { - self.next_speed_check.set(now + self.timeout.dur); - self.next_speed_check_bytes_threshold - .set(u64::from(self.timeout.low_speed_limit)); - } else { - self.next_speed_check_bytes_threshold.set(threshold - delta); - } - } - if self.tick(WhyTick::DownloadUpdate).is_err() { - return false; - } - - // If we've spent too long not actually receiving any data we time out. - if now > self.updated_at.get() + self.timeout.dur { - self.updated_at.set(now); - let msg = format!( - "failed to download any data for `{}` within {}s", - dl.id, - self.timeout.dur.as_secs() - ); - dl.timed_out.set(Some(msg)); - return false; - } - - // If we reached the point in time that we need to check our speed - // limit, see if we've transferred enough data during this threshold. If - // it fails this check then we fail because the download is going too - // slowly. 
- if now >= self.next_speed_check.get() { - self.next_speed_check.set(now + self.timeout.dur); - assert!(self.next_speed_check_bytes_threshold.get() > 0); - let msg = format!( - "download of `{}` failed to transfer more \ - than {} bytes in {}s", - dl.id, - self.timeout.low_speed_limit, - self.timeout.dur.as_secs() - ); - dl.timed_out.set(Some(msg)); - return false; - } - - true - } - - fn tick(&self, why: WhyTick<'_>) -> CargoResult<()> { - let mut progress = self.progress.borrow_mut(); - let progress = progress.as_mut().unwrap(); - - if let WhyTick::DownloadUpdate = why { - if !progress.update_allowed() { - return Ok(()); - } - } - let pending = self.pending.len(); - let mut msg = if pending == 1 { - format!("{} crate", pending) - } else { - format!("{} crates", pending) - }; - match why { - WhyTick::Extracting(krate) => { - msg.push_str(&format!(", extracting {} ...", krate)); - } - _ => { - let mut dur = Duration::new(0, 0); - let mut remaining = 0; - for (dl, _) in self.pending.values() { - dur += dl.start.elapsed(); - // If the total/current look weird just throw out the data - // point, sounds like curl has more to learn before we have - // the true information. - if dl.total.get() >= dl.current.get() { - remaining += dl.total.get() - dl.current.get(); - } - } - if remaining > 0 && dur > Duration::from_millis(500) { - msg.push_str(&format!(", remaining bytes: {}", ByteSize(remaining))); - } - } - } - progress.print_now(&msg) - } -} - -#[derive(Copy, Clone)] -enum WhyTick<'a> { - DownloadStarted, - DownloadUpdate, - DownloadFinished, - Extracting(&'a str), -} - -impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> { - fn drop(&mut self) { - self.set.downloading.set(false); - let progress = self.progress.get_mut().take().unwrap(); - // Don't print a download summary if we're not using a progress bar, - // we've already printed lots of `Downloading...` items. - if !progress.is_enabled() { - return; - } - // If we didn't download anything, no need for a summary. - if self.downloads_finished == 0 { - return; - } - // If an error happened, let's not clutter up the output. - if !self.success { - return; - } - // pick the correct plural of crate(s) - let crate_string = if self.downloads_finished == 1 { - "crate" - } else { - "crates" - }; - let mut status = format!( - "{} {} ({}) in {}", - self.downloads_finished, - crate_string, - ByteSize(self.downloaded_bytes), - util::elapsed(self.start.elapsed()) - ); - // print the size of largest crate if it was >1mb - // however don't print if only a single crate was downloaded - // because it is obvious that it will be the largest then - if self.largest.0 > ByteSize::mb(1).0 && self.downloads_finished > 1 { - status.push_str(&format!( - " (largest was `{}` at {})", - self.largest.1, - ByteSize(self.largest.0), - )); - } - // Clear progress before displaying final summary. 
- drop(progress); - drop(self.set.config.shell().status("Downloaded", status)); - } -} - -mod tls { - use std::cell::Cell; - - use super::Downloads; - - thread_local!(static PTR: Cell = Cell::new(0)); - - pub(crate) fn with(f: impl FnOnce(Option<&Downloads<'_, '_>>) -> R) -> R { - let ptr = PTR.with(|p| p.get()); - if ptr == 0 { - f(None) - } else { - unsafe { f(Some(&*(ptr as *const Downloads<'_, '_>))) } - } - } - - pub(crate) fn set(dl: &Downloads<'_, '_>, f: impl FnOnce() -> R) -> R { - struct Reset<'a, T: Copy>(&'a Cell, T); - - impl<'a, T: Copy> Drop for Reset<'a, T> { - fn drop(&mut self) { - self.0.set(self.1); - } - } - - PTR.with(|p| { - let _reset = Reset(p, p.get()); - p.set(dl as *const Downloads<'_, '_> as usize); - f() - }) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/package_id.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/package_id.rs deleted file mode 100644 index 2c53f9050..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/package_id.rs +++ /dev/null @@ -1,293 +0,0 @@ -use std::collections::HashSet; -use std::fmt::{self, Formatter}; -use std::hash; -use std::hash::Hash; -use std::path::Path; -use std::ptr; -use std::sync::Mutex; - -use serde::de; -use serde::ser; - -use crate::core::source::SourceId; -use crate::util::interning::InternedString; -use crate::util::{CargoResult, ToSemver}; - -lazy_static::lazy_static! { - static ref PACKAGE_ID_CACHE: Mutex> = - Mutex::new(HashSet::new()); -} - -/// Identifier for a specific version of a package in a specific source. -#[derive(Clone, Copy, Eq, PartialOrd, Ord)] -pub struct PackageId { - inner: &'static PackageIdInner, -} - -#[derive(PartialOrd, Eq, Ord)] -struct PackageIdInner { - name: InternedString, - version: semver::Version, - source_id: SourceId, -} - -// Custom equality that uses full equality of SourceId, rather than its custom equality, -// and Version, which usually ignores `build` metadata. -// -// The `build` part of the version is usually ignored (like a "comment"). -// However, there are some cases where it is important. The download path from -// a registry includes the build metadata, and Cargo uses PackageIds for -// creating download paths. Including it here prevents the PackageId interner -// from getting poisoned with PackageIds where that build metadata is missing. -impl PartialEq for PackageIdInner { - fn eq(&self, other: &Self) -> bool { - self.name == other.name - && self.version.major == other.version.major - && self.version.minor == other.version.minor - && self.version.patch == other.version.patch - && self.version.pre == other.version.pre - && self.version.build == other.version.build - && self.source_id.full_eq(other.source_id) - } -} - -// Custom hash that is coherent with the custom equality above. 
-impl Hash for PackageIdInner { - fn hash(&self, into: &mut S) { - self.name.hash(into); - self.version.major.hash(into); - self.version.minor.hash(into); - self.version.patch.hash(into); - self.version.pre.hash(into); - self.version.build.hash(into); - self.source_id.full_hash(into); - } -} - -impl ser::Serialize for PackageId { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - s.collect_str(&format_args!( - "{} {} ({})", - self.inner.name, - self.inner.version, - self.inner.source_id.as_url() - )) - } -} - -impl<'de> de::Deserialize<'de> for PackageId { - fn deserialize(d: D) -> Result - where - D: de::Deserializer<'de>, - { - let string = String::deserialize(d)?; - let mut s = string.splitn(3, ' '); - let name = s.next().unwrap(); - let name = InternedString::new(name); - let version = match s.next() { - Some(s) => s, - None => return Err(de::Error::custom("invalid serialized PackageId")), - }; - let version = version.to_semver().map_err(de::Error::custom)?; - let url = match s.next() { - Some(s) => s, - None => return Err(de::Error::custom("invalid serialized PackageId")), - }; - let url = if url.starts_with('(') && url.ends_with(')') { - &url[1..url.len() - 1] - } else { - return Err(de::Error::custom("invalid serialized PackageId")); - }; - let source_id = SourceId::from_url(url).map_err(de::Error::custom)?; - - Ok(PackageId::pure(name, version, source_id)) - } -} - -impl PartialEq for PackageId { - fn eq(&self, other: &PackageId) -> bool { - if ptr::eq(self.inner, other.inner) { - return true; - } - // This is here so that PackageId uses SourceId's and Version's idea - // of equality. PackageIdInner uses a more exact notion of equality. - self.inner.name == other.inner.name - && self.inner.version == other.inner.version - && self.inner.source_id == other.inner.source_id - } -} - -impl Hash for PackageId { - fn hash(&self, state: &mut S) { - // This is here (instead of derived) so that PackageId uses SourceId's - // and Version's idea of equality. PackageIdInner uses a more exact - // notion of hashing. - self.inner.name.hash(state); - self.inner.version.hash(state); - self.inner.source_id.hash(state); - } -} - -impl PackageId { - pub fn new( - name: impl Into, - version: T, - sid: SourceId, - ) -> CargoResult { - let v = version.to_semver()?; - Ok(PackageId::pure(name.into(), v, sid)) - } - - pub fn pure(name: InternedString, version: semver::Version, source_id: SourceId) -> PackageId { - let inner = PackageIdInner { - name, - version, - source_id, - }; - let mut cache = PACKAGE_ID_CACHE.lock().unwrap(); - let inner = cache.get(&inner).cloned().unwrap_or_else(|| { - let inner = Box::leak(Box::new(inner)); - cache.insert(inner); - inner - }); - PackageId { inner } - } - - pub fn name(self) -> InternedString { - self.inner.name - } - pub fn version(self) -> &'static semver::Version { - &self.inner.version - } - pub fn source_id(self) -> SourceId { - self.inner.source_id - } - - pub fn with_precise(self, precise: Option) -> PackageId { - PackageId::pure( - self.inner.name, - self.inner.version.clone(), - self.inner.source_id.with_precise(precise), - ) - } - - pub fn with_source_id(self, source: SourceId) -> PackageId { - PackageId::pure(self.inner.name, self.inner.version.clone(), source) - } - - pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Self { - if self.source_id() == to_replace { - self.with_source_id(replace_with) - } else { - self - } - } - - /// Returns a value that implements a "stable" hashable value. 
- /// - /// Stable hashing removes the path prefix of the workspace from path - /// packages. This helps with reproducible builds, since this hash is part - /// of the symbol metadata, and we don't want the absolute path where the - /// build is performed to affect the binary output. - pub fn stable_hash(self, workspace: &Path) -> PackageIdStableHash<'_> { - PackageIdStableHash(self, workspace) - } -} - -pub struct PackageIdStableHash<'a>(PackageId, &'a Path); - -impl<'a> Hash for PackageIdStableHash<'a> { - fn hash(&self, state: &mut S) { - self.0.inner.name.hash(state); - self.0.inner.version.hash(state); - self.0.inner.source_id.stable_hash(self.1, state); - } -} - -impl fmt::Display for PackageId { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - write!(f, "{} v{}", self.inner.name, self.inner.version)?; - - if !self.inner.source_id.is_default_registry() { - write!(f, " ({})", self.inner.source_id)?; - } - - Ok(()) - } -} - -impl fmt::Debug for PackageId { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - f.debug_struct("PackageId") - .field("name", &self.inner.name) - .field("version", &self.inner.version.to_string()) - .field("source", &self.inner.source_id.to_string()) - .finish() - } -} - -#[cfg(test)] -mod tests { - use super::PackageId; - use crate::core::source::SourceId; - use crate::sources::CRATES_IO_INDEX; - use crate::util::IntoUrl; - - #[test] - fn invalid_version_handled_nicely() { - let loc = CRATES_IO_INDEX.into_url().unwrap(); - let repo = SourceId::for_registry(&loc).unwrap(); - - assert!(PackageId::new("foo", "1.0", repo).is_err()); - assert!(PackageId::new("foo", "1", repo).is_err()); - assert!(PackageId::new("foo", "bar", repo).is_err()); - assert!(PackageId::new("foo", "", repo).is_err()); - } - - #[test] - fn debug() { - let loc = CRATES_IO_INDEX.into_url().unwrap(); - let pkg_id = PackageId::new("foo", "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap(); - assert_eq!( - r#"PackageId { name: "foo", version: "1.0.0", source: "registry `crates-io`" }"#, - format!("{:?}", pkg_id) - ); - - let expected = r#" -PackageId { - name: "foo", - version: "1.0.0", - source: "registry `crates-io`", -} -"# - .trim(); - - // Can be removed once trailing commas in Debug have reached the stable - // channel. 
- let expected_without_trailing_comma = r#" -PackageId { - name: "foo", - version: "1.0.0", - source: "registry `crates-io`" -} -"# - .trim(); - - let actual = format!("{:#?}", pkg_id); - if actual.ends_with(",\n}") { - assert_eq!(actual, expected); - } else { - assert_eq!(actual, expected_without_trailing_comma); - } - } - - #[test] - fn display() { - let loc = CRATES_IO_INDEX.into_url().unwrap(); - let pkg_id = PackageId::new("foo", "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap(); - assert_eq!("foo v1.0.0", pkg_id.to_string()); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/package_id_spec.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/package_id_spec.rs deleted file mode 100644 index 723b624e8..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/package_id_spec.rs +++ /dev/null @@ -1,401 +0,0 @@ -use std::collections::HashMap; -use std::fmt; - -use anyhow::{bail, Context as _}; -use semver::Version; -use serde::{de, ser}; -use url::Url; - -use crate::core::PackageId; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::lev_distance; -use crate::util::{validate_package_name, IntoUrl, ToSemver}; - -/// Some or all of the data required to identify a package: -/// -/// 1. the package name (a `String`, required) -/// 2. the package version (a `Version`, optional) -/// 3. the package source (a `Url`, optional) -/// -/// If any of the optional fields are omitted, then the package ID may be ambiguous, there may be -/// more than one package/version/url combo that will match. However, often just the name is -/// sufficient to uniquely define a package ID. -#[derive(Clone, PartialEq, Eq, Debug, Hash, Ord, PartialOrd)] -pub struct PackageIdSpec { - name: InternedString, - version: Option, - url: Option, -} - -impl PackageIdSpec { - /// Parses a spec string and returns a `PackageIdSpec` if the string was valid. 
- /// - /// # Examples - /// Some examples of valid strings - /// - /// ``` - /// use cargo::core::PackageIdSpec; - /// - /// let specs = vec![ - /// "https://crates.io/foo", - /// "https://crates.io/foo#1.2.3", - /// "https://crates.io/foo#bar:1.2.3", - /// "foo", - /// "foo:1.2.3", - /// ]; - /// for spec in specs { - /// assert!(PackageIdSpec::parse(spec).is_ok()); - /// } - pub fn parse(spec: &str) -> CargoResult { - if spec.contains("://") { - if let Ok(url) = spec.into_url() { - return PackageIdSpec::from_url(url); - } - } else if spec.contains('/') || spec.contains('\\') { - let abs = std::env::current_dir().unwrap_or_default().join(spec); - if abs.exists() { - let maybe_url = Url::from_file_path(abs) - .map_or_else(|_| "a file:// URL".to_string(), |url| url.to_string()); - bail!( - "package ID specification `{}` looks like a file path, \ - maybe try {}", - spec, - maybe_url - ); - } - } - let mut parts = spec.splitn(2, ':'); - let name = parts.next().unwrap(); - let version = match parts.next() { - Some(version) => Some(version.to_semver()?), - None => None, - }; - validate_package_name(name, "pkgid", "")?; - Ok(PackageIdSpec { - name: InternedString::new(name), - version, - url: None, - }) - } - - /// Roughly equivalent to `PackageIdSpec::parse(spec)?.query(i)` - pub fn query_str(spec: &str, i: I) -> CargoResult - where - I: IntoIterator, - { - let i: Vec<_> = i.into_iter().collect(); - let spec = PackageIdSpec::parse(spec).with_context(|| { - let suggestion = lev_distance::closest_msg(spec, i.iter(), |id| id.name().as_str()); - format!("invalid package ID specification: `{}`{}", spec, suggestion) - })?; - spec.query(i) - } - - /// Convert a `PackageId` to a `PackageIdSpec`, which will have both the `Version` and `Url` - /// fields filled in. - pub fn from_package_id(package_id: PackageId) -> PackageIdSpec { - PackageIdSpec { - name: package_id.name(), - version: Some(package_id.version().clone()), - url: Some(package_id.source_id().url().clone()), - } - } - - /// Tries to convert a valid `Url` to a `PackageIdSpec`. - fn from_url(mut url: Url) -> CargoResult { - if url.query().is_some() { - bail!("cannot have a query string in a pkgid: {}", url) - } - let frag = url.fragment().map(|s| s.to_owned()); - url.set_fragment(None); - let (name, version) = { - let mut path = url - .path_segments() - .ok_or_else(|| anyhow::format_err!("pkgid urls must have a path: {}", url))?; - let path_name = path.next_back().ok_or_else(|| { - anyhow::format_err!( - "pkgid urls must have at least one path \ - component: {}", - url - ) - })?; - match frag { - Some(fragment) => { - let mut parts = fragment.splitn(2, ':'); - let name_or_version = parts.next().unwrap(); - match parts.next() { - Some(part) => { - let version = part.to_semver()?; - (InternedString::new(name_or_version), Some(version)) - } - None => { - if name_or_version.chars().next().unwrap().is_alphabetic() { - (InternedString::new(name_or_version), None) - } else { - let version = name_or_version.to_semver()?; - (InternedString::new(path_name), Some(version)) - } - } - } - } - None => (InternedString::new(path_name), None), - } - }; - Ok(PackageIdSpec { - name, - version, - url: Some(url), - }) - } - - pub fn name(&self) -> InternedString { - self.name - } - - pub fn version(&self) -> Option<&Version> { - self.version.as_ref() - } - - pub fn url(&self) -> Option<&Url> { - self.url.as_ref() - } - - pub fn set_url(&mut self, url: Url) { - self.url = Some(url); - } - - /// Checks whether the given `PackageId` matches the `PackageIdSpec`. 
- pub fn matches(&self, package_id: PackageId) -> bool { - if self.name() != package_id.name() { - return false; - } - - if let Some(ref v) = self.version { - if v != package_id.version() { - return false; - } - } - - match self.url { - Some(ref u) => u == package_id.source_id().url(), - None => true, - } - } - - /// Checks a list of `PackageId`s to find 1 that matches this `PackageIdSpec`. If 0, 2, or - /// more are found, then this returns an error. - pub fn query(&self, i: I) -> CargoResult - where - I: IntoIterator, - { - let all_ids: Vec<_> = i.into_iter().collect(); - let mut ids = all_ids.iter().copied().filter(|&id| self.matches(id)); - let ret = match ids.next() { - Some(id) => id, - None => { - let mut suggestion = String::new(); - let try_spec = |spec: PackageIdSpec, suggestion: &mut String| { - let try_matches: Vec<_> = all_ids - .iter() - .copied() - .filter(|&id| spec.matches(id)) - .collect(); - if !try_matches.is_empty() { - suggestion.push_str("\nDid you mean one of these?\n"); - minimize(suggestion, &try_matches, self); - } - }; - if self.url.is_some() { - try_spec( - PackageIdSpec { - name: self.name, - version: self.version.clone(), - url: None, - }, - &mut suggestion, - ); - } - if suggestion.is_empty() && self.version.is_some() { - try_spec( - PackageIdSpec { - name: self.name, - version: None, - url: None, - }, - &mut suggestion, - ); - } - if suggestion.is_empty() { - suggestion.push_str(&lev_distance::closest_msg( - &self.name, - all_ids.iter(), - |id| id.name().as_str(), - )); - } - - bail!( - "package ID specification `{}` did not match any packages{}", - self, - suggestion - ); - } - }; - return match ids.next() { - Some(other) => { - let mut msg = format!( - "There are multiple `{}` packages in \ - your project, and the specification \ - `{}` is ambiguous.\n\ - Please re-run this command \ - with `-p ` where `` is one \ - of the following:", - self.name(), - self - ); - let mut vec = vec![ret, other]; - vec.extend(ids); - minimize(&mut msg, &vec, self); - Err(anyhow::format_err!("{}", msg)) - } - None => Ok(ret), - }; - - fn minimize(msg: &mut String, ids: &[PackageId], spec: &PackageIdSpec) { - let mut version_cnt = HashMap::new(); - for id in ids { - *version_cnt.entry(id.version()).or_insert(0) += 1; - } - for id in ids { - if version_cnt[id.version()] == 1 { - msg.push_str(&format!("\n {}:{}", spec.name(), id.version())); - } else { - msg.push_str(&format!("\n {}", PackageIdSpec::from_package_id(*id))); - } - } - } - } -} - -impl fmt::Display for PackageIdSpec { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut printed_name = false; - match self.url { - Some(ref url) => { - write!(f, "{}", url)?; - if url.path_segments().unwrap().next_back().unwrap() != &*self.name { - printed_name = true; - write!(f, "#{}", self.name)?; - } - } - None => { - printed_name = true; - write!(f, "{}", self.name)? 
- } - } - if let Some(ref v) = self.version { - write!(f, "{}{}", if printed_name { ":" } else { "#" }, v)?; - } - Ok(()) - } -} - -impl ser::Serialize for PackageIdSpec { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - self.to_string().serialize(s) - } -} - -impl<'de> de::Deserialize<'de> for PackageIdSpec { - fn deserialize(d: D) -> Result - where - D: de::Deserializer<'de>, - { - let string = String::deserialize(d)?; - PackageIdSpec::parse(&string).map_err(de::Error::custom) - } -} - -#[cfg(test)] -mod tests { - use super::PackageIdSpec; - use crate::core::{PackageId, SourceId}; - use crate::util::interning::InternedString; - use crate::util::ToSemver; - use url::Url; - - #[test] - fn good_parsing() { - fn ok(spec: &str, expected: PackageIdSpec) { - let parsed = PackageIdSpec::parse(spec).unwrap(); - assert_eq!(parsed, expected); - assert_eq!(parsed.to_string(), spec); - } - - ok( - "https://crates.io/foo", - PackageIdSpec { - name: InternedString::new("foo"), - version: None, - url: Some(Url::parse("https://crates.io/foo").unwrap()), - }, - ); - ok( - "https://crates.io/foo#1.2.3", - PackageIdSpec { - name: InternedString::new("foo"), - version: Some("1.2.3".to_semver().unwrap()), - url: Some(Url::parse("https://crates.io/foo").unwrap()), - }, - ); - ok( - "https://crates.io/foo#bar:1.2.3", - PackageIdSpec { - name: InternedString::new("bar"), - version: Some("1.2.3".to_semver().unwrap()), - url: Some(Url::parse("https://crates.io/foo").unwrap()), - }, - ); - ok( - "foo", - PackageIdSpec { - name: InternedString::new("foo"), - version: None, - url: None, - }, - ); - ok( - "foo:1.2.3", - PackageIdSpec { - name: InternedString::new("foo"), - version: Some("1.2.3".to_semver().unwrap()), - url: None, - }, - ); - } - - #[test] - fn bad_parsing() { - assert!(PackageIdSpec::parse("baz:").is_err()); - assert!(PackageIdSpec::parse("baz:*").is_err()); - assert!(PackageIdSpec::parse("baz:1.0").is_err()); - assert!(PackageIdSpec::parse("https://baz:1.0").is_err()); - assert!(PackageIdSpec::parse("https://#baz:1.0").is_err()); - } - - #[test] - fn matching() { - let url = Url::parse("https://example.com").unwrap(); - let sid = SourceId::for_registry(&url).unwrap(); - let foo = PackageId::new("foo", "1.2.3", sid).unwrap(); - let bar = PackageId::new("bar", "1.2.3", sid).unwrap(); - - assert!(PackageIdSpec::parse("foo").unwrap().matches(foo)); - assert!(!PackageIdSpec::parse("foo").unwrap().matches(bar)); - assert!(PackageIdSpec::parse("foo:1.2.3").unwrap().matches(foo)); - assert!(!PackageIdSpec::parse("foo:1.2.2").unwrap().matches(foo)); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/profiles.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/profiles.rs deleted file mode 100644 index 51e4ae349..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/profiles.rs +++ /dev/null @@ -1,1297 +0,0 @@ -use crate::core::compiler::{CompileKind, CompileMode, Unit}; -use crate::core::resolver::features::FeaturesFor; -use crate::core::{Feature, PackageId, PackageIdSpec, Resolve, Shell, Target, Workspace}; -use crate::util::interning::InternedString; -use crate::util::toml::{ProfilePackageSpec, StringOrBool, TomlProfile, TomlProfiles, U32OrBool}; -use crate::util::{closest_msg, config, CargoResult, Config}; -use anyhow::{bail, Context as _}; -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::hash::Hash; -use std::{cmp, env, fmt, hash}; - -/// Collection of all profiles. 
-#[derive(Clone, Debug)] -pub struct Profiles { - /// Incremental compilation can be overridden globally via: - /// - `CARGO_INCREMENTAL` environment variable. - /// - `build.incremental` config value. - incremental: Option, - /// Map of profile name to directory name for that profile. - dir_names: HashMap, - /// The profile makers. Key is the profile name. - by_name: HashMap, - /// The original profiles written by the user in the manifest and config. - /// - /// This is here to assist with error reporting, as the `ProfileMaker` - /// values have the inherits chains all merged together. - original_profiles: BTreeMap, - /// Whether or not unstable "named" profiles are enabled. - named_profiles_enabled: bool, - /// The profile the user requested to use. - requested_profile: InternedString, - /// The host target for rustc being used by this `Profiles`. - rustc_host: InternedString, -} - -impl Profiles { - pub fn new(ws: &Workspace<'_>, requested_profile: InternedString) -> CargoResult { - let config = ws.config(); - let incremental = match env::var_os("CARGO_INCREMENTAL") { - Some(v) => Some(v == "1"), - None => config.build_config()?.incremental, - }; - let mut profiles = merge_config_profiles(ws, requested_profile)?; - let rustc_host = ws.config().load_global_rustc(Some(ws))?.host; - - if !ws.unstable_features().is_enabled(Feature::named_profiles()) { - let mut profile_makers = Profiles { - incremental, - named_profiles_enabled: false, - dir_names: Self::predefined_dir_names(), - by_name: HashMap::new(), - original_profiles: profiles.clone(), - requested_profile, - rustc_host, - }; - - profile_makers.by_name.insert( - InternedString::new("dev"), - ProfileMaker::new(Profile::default_dev(), profiles.remove("dev")), - ); - profile_makers - .dir_names - .insert(InternedString::new("dev"), InternedString::new("debug")); - - profile_makers.by_name.insert( - InternedString::new("release"), - ProfileMaker::new(Profile::default_release(), profiles.remove("release")), - ); - profile_makers.dir_names.insert( - InternedString::new("release"), - InternedString::new("release"), - ); - - profile_makers.by_name.insert( - InternedString::new("test"), - ProfileMaker::new(Profile::default_test(), profiles.remove("test")), - ); - profile_makers - .dir_names - .insert(InternedString::new("test"), InternedString::new("debug")); - - profile_makers.by_name.insert( - InternedString::new("bench"), - ProfileMaker::new(Profile::default_bench(), profiles.remove("bench")), - ); - profile_makers - .dir_names - .insert(InternedString::new("bench"), InternedString::new("release")); - - profile_makers.by_name.insert( - InternedString::new("doc"), - ProfileMaker::new(Profile::default_doc(), profiles.remove("doc")), - ); - profile_makers - .dir_names - .insert(InternedString::new("doc"), InternedString::new("debug")); - - return Ok(profile_makers); - } - - let mut profile_makers = Profiles { - incremental, - named_profiles_enabled: true, - dir_names: Self::predefined_dir_names(), - by_name: HashMap::new(), - original_profiles: profiles.clone(), - requested_profile, - rustc_host, - }; - - Self::add_root_profiles(&mut profile_makers, &profiles); - - // Merge with predefined profiles. - use std::collections::btree_map::Entry; - for (predef_name, mut predef_prof) in Self::predefined_profiles().into_iter() { - match profiles.entry(InternedString::new(predef_name)) { - Entry::Vacant(vac) => { - vac.insert(predef_prof); - } - Entry::Occupied(mut oc) => { - // Override predefined with the user-provided Toml. 
- let r = oc.get_mut(); - predef_prof.merge(r); - *r = predef_prof; - } - } - } - - for (name, profile) in &profiles { - profile_makers.add_maker(*name, profile, &profiles)?; - } - // Verify that the requested profile is defined *somewhere*. - // This simplifies the API (no need for CargoResult), and enforces - // assumptions about how config profiles are loaded. - profile_makers.get_profile_maker(requested_profile)?; - Ok(profile_makers) - } - - /// Returns the hard-coded directory names for built-in profiles. - fn predefined_dir_names() -> HashMap { - let mut dir_names = HashMap::new(); - dir_names.insert(InternedString::new("dev"), InternedString::new("debug")); - dir_names.insert(InternedString::new("test"), InternedString::new("debug")); - dir_names.insert(InternedString::new("bench"), InternedString::new("release")); - dir_names - } - - /// Initialize `by_name` with the two "root" profiles, `dev`, and - /// `release` given the user's definition. - fn add_root_profiles( - profile_makers: &mut Profiles, - profiles: &BTreeMap, - ) { - profile_makers.by_name.insert( - InternedString::new("dev"), - ProfileMaker::new(Profile::default_dev(), profiles.get("dev").cloned()), - ); - - profile_makers.by_name.insert( - InternedString::new("release"), - ProfileMaker::new(Profile::default_release(), profiles.get("release").cloned()), - ); - } - - /// Returns the built-in profiles (not including dev/release, which are - /// "root" profiles). - fn predefined_profiles() -> Vec<(&'static str, TomlProfile)> { - vec![ - ( - "bench", - TomlProfile { - inherits: Some(InternedString::new("release")), - ..TomlProfile::default() - }, - ), - ( - "test", - TomlProfile { - inherits: Some(InternedString::new("dev")), - ..TomlProfile::default() - }, - ), - ( - "doc", - TomlProfile { - inherits: Some(InternedString::new("dev")), - ..TomlProfile::default() - }, - ), - ] - } - - /// Creates a `ProfileMaker`, and inserts it into `self.by_name`. - fn add_maker( - &mut self, - name: InternedString, - profile: &TomlProfile, - profiles: &BTreeMap, - ) -> CargoResult<()> { - match &profile.dir_name { - None => {} - Some(dir_name) => { - self.dir_names.insert(name, dir_name.to_owned()); - } - } - - // dev/release are "roots" and don't inherit. - if name == "dev" || name == "release" { - if profile.inherits.is_some() { - bail!( - "`inherits` must not be specified in root profile `{}`", - name - ); - } - // Already inserted from `add_root_profiles`, no need to do anything. - return Ok(()); - } - - // Keep track for inherits cycles. - let mut set = HashSet::new(); - set.insert(name); - let maker = self.process_chain(name, profile, &mut set, profiles)?; - self.by_name.insert(name, maker); - Ok(()) - } - - /// Build a `ProfileMaker` by recursively following the `inherits` setting. - /// - /// * `name`: The name of the profile being processed. - /// * `profile`: The TOML profile being processed. - /// * `set`: Set of profiles that have been visited, used to detect cycles. - /// * `profiles`: Map of all TOML profiles. - /// - /// Returns a `ProfileMaker` to be used for the given named profile. - fn process_chain( - &mut self, - name: InternedString, - profile: &TomlProfile, - set: &mut HashSet, - profiles: &BTreeMap, - ) -> CargoResult { - let mut maker = match profile.inherits { - Some(inherits_name) if inherits_name == "dev" || inherits_name == "release" => { - // These are the root profiles added in `add_root_profiles`. 
- self.get_profile_maker(inherits_name).unwrap().clone() - } - Some(inherits_name) => { - if !set.insert(inherits_name) { - bail!( - "profile inheritance loop detected with profile `{}` inheriting `{}`", - name, - inherits_name - ); - } - - match profiles.get(&inherits_name) { - None => { - bail!( - "profile `{}` inherits from `{}`, but that profile is not defined", - name, - inherits_name - ); - } - Some(parent) => self.process_chain(inherits_name, parent, set, profiles)?, - } - } - None => { - bail!( - "profile `{}` is missing an `inherits` directive \ - (`inherits` is required for all profiles except `dev` or `release`)", - name - ); - } - }; - match &mut maker.toml { - Some(toml) => toml.merge(profile), - None => maker.toml = Some(profile.clone()), - }; - Ok(maker) - } - - /// Retrieves the profile for a target. - /// `is_member` is whether or not this package is a member of the - /// workspace. - pub fn get_profile( - &self, - pkg_id: PackageId, - is_member: bool, - is_local: bool, - unit_for: UnitFor, - mode: CompileMode, - kind: CompileKind, - ) -> Profile { - let (profile_name, inherits) = if !self.named_profiles_enabled { - // With the feature disabled, we degrade `--profile` back to the - // `--release` and `--debug` predicates, and convert back from - // ProfileKind::Custom instantiation. - - let release = matches!(self.requested_profile.as_str(), "release" | "bench"); - - match mode { - CompileMode::Test | CompileMode::Bench | CompileMode::Doctest => { - if release { - ( - InternedString::new("bench"), - Some(InternedString::new("release")), - ) - } else { - ( - InternedString::new("test"), - Some(InternedString::new("dev")), - ) - } - } - CompileMode::Build | CompileMode::Check { .. } | CompileMode::RunCustomBuild => { - // Note: `RunCustomBuild` doesn't normally use this code path. - // `build_unit_profiles` normally ensures that it selects the - // ancestor's profile. However, `cargo clean -p` can hit this - // path. - if release { - (InternedString::new("release"), None) - } else { - (InternedString::new("dev"), None) - } - } - CompileMode::Doc { .. } | CompileMode::Docscrape => { - (InternedString::new("doc"), None) - } - } - } else { - (self.requested_profile, None) - }; - let maker = self.get_profile_maker(profile_name).unwrap(); - let mut profile = maker.get_profile(Some(pkg_id), is_member, unit_for); - - // Dealing with `panic=abort` and `panic=unwind` requires some special - // treatment. Be sure to process all the various options here. - match unit_for.panic_setting() { - PanicSetting::AlwaysUnwind => profile.panic = PanicStrategy::Unwind, - PanicSetting::ReadProfile => {} - PanicSetting::Inherit => { - if let Some(inherits) = inherits { - // TODO: Fixme, broken with named profiles. - let maker = self.get_profile_maker(inherits).unwrap(); - profile.panic = maker.get_profile(Some(pkg_id), is_member, unit_for).panic; - } - } - } - - // Default macOS debug information to being stored in the "unpacked" - // split-debuginfo format. At the time of this writing that's the only - // platform which has a stable `-Csplit-debuginfo` option for rustc, - // and it's typically much faster than running `dsymutil` on all builds - // in incremental cases. 
- if let Some(debug) = profile.debuginfo { - if profile.split_debuginfo.is_none() && debug > 0 { - let target = match &kind { - CompileKind::Host => self.rustc_host.as_str(), - CompileKind::Target(target) => target.short_name(), - }; - if target.contains("-apple-") { - profile.split_debuginfo = Some(InternedString::new("unpacked")); - } - } - } - - // Incremental can be globally overridden. - if let Some(v) = self.incremental { - profile.incremental = v; - } - - // Only enable incremental compilation for sources the user can - // modify (aka path sources). For things that change infrequently, - // non-incremental builds yield better performance in the compiler - // itself (aka crates.io / git dependencies) - // - // (see also https://github.com/rust-lang/cargo/issues/3972) - if !is_local { - profile.incremental = false; - } - profile.name = profile_name; - profile - } - - /// The profile for *running* a `build.rs` script is only used for setting - /// a few environment variables. To ensure proper de-duplication of the - /// running `Unit`, this uses a stripped-down profile (so that unrelated - /// profile flags don't cause `build.rs` to needlessly run multiple - /// times). - pub fn get_profile_run_custom_build(&self, for_unit_profile: &Profile) -> Profile { - let mut result = Profile::default(); - result.name = for_unit_profile.name; - result.root = for_unit_profile.root; - result.debuginfo = for_unit_profile.debuginfo; - result.opt_level = for_unit_profile.opt_level; - result - } - - /// This returns the base profile. This is currently used for the - /// `[Finished]` line. It is not entirely accurate, since it doesn't - /// select for the package that was actually built. - pub fn base_profile(&self) -> Profile { - let profile_name = if !self.named_profiles_enabled { - match self.requested_profile.as_str() { - "release" | "bench" => self.requested_profile, - _ => InternedString::new("dev"), - } - } else { - self.requested_profile - }; - - let maker = self.get_profile_maker(profile_name).unwrap(); - maker.get_profile(None, true, UnitFor::new_normal()) - } - - /// Gets the directory name for a profile, like `debug` or `release`. - pub fn get_dir_name(&self) -> InternedString { - *self - .dir_names - .get(&self.requested_profile) - .unwrap_or(&self.requested_profile) - } - - /// Used to check for overrides for non-existing packages. - pub fn validate_packages( - &self, - profiles: Option<&TomlProfiles>, - shell: &mut Shell, - resolve: &Resolve, - ) -> CargoResult<()> { - for (name, profile) in &self.by_name { - // If the user did not specify an override, skip this. This is here - // to avoid generating errors for inherited profiles which don't - // specify package overrides. The `by_name` profile has had the inherits - // chain merged, so we need to look at the original source to check - // if an override was specified. - if self - .original_profiles - .get(name) - .and_then(|orig| orig.package.as_ref()) - .is_none() - { - continue; - } - let found = validate_packages_unique(resolve, name, &profile.toml)?; - // We intentionally do not validate unmatched packages for config - // profiles, in case they are defined in a central location. This - // iterates over the manifest profiles only. - if let Some(profiles) = profiles { - if let Some(toml_profile) = profiles.get(name) { - validate_packages_unmatched(shell, resolve, name, toml_profile, &found)?; - } - } - } - Ok(()) - } - - /// Returns the profile maker for the given profile name. 
- fn get_profile_maker(&self, name: InternedString) -> CargoResult<&ProfileMaker> { - self.by_name - .get(&name) - .ok_or_else(|| anyhow::format_err!("profile `{}` is not defined", name)) - } -} - -/// An object used for handling the profile hierarchy. -/// -/// The precedence of profiles are (first one wins): -/// - Profiles in `.cargo/config` files (using same order as below). -/// - [profile.dev.package.name] -- a named package. -/// - [profile.dev.package."*"] -- this cannot apply to workspace members. -/// - [profile.dev.build-override] -- this can only apply to `build.rs` scripts -/// and their dependencies. -/// - [profile.dev] -/// - Default (hard-coded) values. -#[derive(Debug, Clone)] -struct ProfileMaker { - /// The starting, hard-coded defaults for the profile. - default: Profile, - /// The TOML profile defined in `Cargo.toml` or config. - /// - /// This is None if the user did not specify one, in which case the - /// `default` is used. Note that the built-in defaults for test/bench/doc - /// always set this since they need to declare the `inherits` value. - toml: Option, -} - -impl ProfileMaker { - /// Creates a new `ProfileMaker`. - /// - /// Note that this does not process `inherits`, the caller is responsible for that. - fn new(default: Profile, toml: Option) -> ProfileMaker { - ProfileMaker { default, toml } - } - - /// Generates a new `Profile`. - fn get_profile( - &self, - pkg_id: Option, - is_member: bool, - unit_for: UnitFor, - ) -> Profile { - let mut profile = self.default; - - // First apply profile-specific settings, things like - // `[profile.release]` - if let Some(toml) = &self.toml { - merge_profile(&mut profile, toml); - } - - // Next start overriding those settings. First comes build dependencies - // which default to opt-level 0... - if unit_for.is_for_host() { - // For-host units are things like procedural macros, build scripts, and - // their dependencies. For these units most projects simply want them - // to compile quickly and the runtime doesn't matter too much since - // they tend to process very little data. For this reason we default - // them to a "compile as quickly as possible" mode which for now means - // basically turning down the optimization level and avoid limiting - // codegen units. This ensures that we spend little time optimizing as - // well as enabling parallelism by not constraining codegen units. - profile.opt_level = InternedString::new("0"); - profile.codegen_units = None; - } - // ... and next comes any other sorts of overrides specified in - // profiles, such as `[profile.release.build-override]` or - // `[profile.release.package.foo]` - if let Some(toml) = &self.toml { - merge_toml_overrides(pkg_id, is_member, unit_for, &mut profile, toml); - } - profile - } -} - -/// Merge package and build overrides from the given TOML profile into the given `Profile`. 
-fn merge_toml_overrides( - pkg_id: Option, - is_member: bool, - unit_for: UnitFor, - profile: &mut Profile, - toml: &TomlProfile, -) { - if unit_for.is_for_host() { - if let Some(build_override) = &toml.build_override { - merge_profile(profile, build_override); - } - } - if let Some(overrides) = toml.package.as_ref() { - if !is_member { - if let Some(all) = overrides.get(&ProfilePackageSpec::All) { - merge_profile(profile, all); - } - } - if let Some(pkg_id) = pkg_id { - let mut matches = overrides - .iter() - .filter_map(|(key, spec_profile)| match *key { - ProfilePackageSpec::All => None, - ProfilePackageSpec::Spec(ref s) => { - if s.matches(pkg_id) { - Some(spec_profile) - } else { - None - } - } - }); - if let Some(spec_profile) = matches.next() { - merge_profile(profile, spec_profile); - // `validate_packages` should ensure that there are - // no additional matches. - assert!( - matches.next().is_none(), - "package `{}` matched multiple package profile overrides", - pkg_id - ); - } - } - } -} - -/// Merge the given TOML profile into the given `Profile`. -/// -/// Does not merge overrides (see `merge_toml_overrides`). -fn merge_profile(profile: &mut Profile, toml: &TomlProfile) { - if let Some(ref opt_level) = toml.opt_level { - profile.opt_level = InternedString::new(&opt_level.0); - } - match toml.lto { - Some(StringOrBool::Bool(b)) => profile.lto = Lto::Bool(b), - Some(StringOrBool::String(ref n)) if is_off(n.as_str()) => profile.lto = Lto::Off, - Some(StringOrBool::String(ref n)) => profile.lto = Lto::Named(InternedString::new(n)), - None => {} - } - if toml.codegen_backend.is_some() { - profile.codegen_backend = toml.codegen_backend; - } - if toml.codegen_units.is_some() { - profile.codegen_units = toml.codegen_units; - } - match toml.debug { - Some(U32OrBool::U32(debug)) => profile.debuginfo = Some(debug), - Some(U32OrBool::Bool(true)) => profile.debuginfo = Some(2), - Some(U32OrBool::Bool(false)) => profile.debuginfo = None, - None => {} - } - if let Some(debug_assertions) = toml.debug_assertions { - profile.debug_assertions = debug_assertions; - } - if let Some(split_debuginfo) = &toml.split_debuginfo { - profile.split_debuginfo = Some(InternedString::new(split_debuginfo)); - } - if let Some(rpath) = toml.rpath { - profile.rpath = rpath; - } - if let Some(panic) = &toml.panic { - profile.panic = match panic.as_str() { - "unwind" => PanicStrategy::Unwind, - "abort" => PanicStrategy::Abort, - // This should be validated in TomlProfile::validate - _ => panic!("Unexpected panic setting `{}`", panic), - }; - } - if let Some(overflow_checks) = toml.overflow_checks { - profile.overflow_checks = overflow_checks; - } - if let Some(incremental) = toml.incremental { - profile.incremental = incremental; - } - profile.strip = match toml.strip { - Some(StringOrBool::Bool(true)) => Strip::Named(InternedString::new("symbols")), - None | Some(StringOrBool::Bool(false)) => Strip::None, - Some(StringOrBool::String(ref n)) if n.as_str() == "none" => Strip::None, - Some(StringOrBool::String(ref n)) => Strip::Named(InternedString::new(n)), - }; -} - -/// The root profile (dev/release). -/// -/// This is currently only used for the `PROFILE` env var for build scripts -/// for backwards compatibility. We should probably deprecate `PROFILE` and -/// encourage using things like `DEBUG` and `OPT_LEVEL` instead. 
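`merge_profile` above maps the loosely typed TOML values (`lto = false`/`"off"`/`"thin"`, `debug = true`/`2`, and so on) onto concrete settings. A stand-alone sketch of just that mapping, with simplified local types in place of cargo's `TomlProfile`:

```rust
#[derive(Debug, PartialEq)]
enum Lto {
    Off,
    Bool(bool),
    Named(String),
}

enum StringOrBool {
    String(String),
    Bool(bool),
}

enum U32OrBool {
    U32(u32),
    Bool(bool),
}

// Strings like "off"/"n"/"no"/"none" all mean "explicitly disabled".
fn is_off(s: &str) -> bool {
    matches!(s, "off" | "n" | "no" | "none")
}

fn lto_setting(toml: StringOrBool) -> Lto {
    match toml {
        StringOrBool::Bool(b) => Lto::Bool(b),
        StringOrBool::String(s) if is_off(&s) => Lto::Off,
        StringOrBool::String(s) => Lto::Named(s),
    }
}

// `debug = true` means full debuginfo (level 2); `false` means none.
fn debuginfo(toml: U32OrBool) -> Option<u32> {
    match toml {
        U32OrBool::U32(n) => Some(n),
        U32OrBool::Bool(true) => Some(2),
        U32OrBool::Bool(false) => None,
    }
}

fn main() {
    assert_eq!(lto_setting(StringOrBool::String("thin".into())), Lto::Named("thin".into()));
    assert_eq!(lto_setting(StringOrBool::String("off".into())), Lto::Off);
    assert_eq!(debuginfo(U32OrBool::Bool(true)), Some(2));
}
```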
-#[derive(Clone, Copy, Eq, PartialOrd, Ord, PartialEq, Debug)] -pub enum ProfileRoot { - Release, - Debug, -} - -/// Profile settings used to determine which compiler flags to use for a -/// target. -#[derive(Clone, Copy, Eq, PartialOrd, Ord, serde::Serialize)] -pub struct Profile { - pub name: InternedString, - pub opt_level: InternedString, - #[serde(skip)] // named profiles are unstable - pub root: ProfileRoot, - pub lto: Lto, - // `None` means use rustc default. - pub codegen_backend: Option, - // `None` means use rustc default. - pub codegen_units: Option, - pub debuginfo: Option, - pub split_debuginfo: Option, - pub debug_assertions: bool, - pub overflow_checks: bool, - pub rpath: bool, - pub incremental: bool, - pub panic: PanicStrategy, - pub strip: Strip, -} - -impl Default for Profile { - fn default() -> Profile { - Profile { - name: InternedString::new(""), - opt_level: InternedString::new("0"), - root: ProfileRoot::Debug, - lto: Lto::Bool(false), - codegen_backend: None, - codegen_units: None, - debuginfo: None, - debug_assertions: false, - split_debuginfo: None, - overflow_checks: false, - rpath: false, - incremental: false, - panic: PanicStrategy::Unwind, - strip: Strip::None, - } - } -} - -compact_debug! { - impl fmt::Debug for Profile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let (default, default_name) = match self.name.as_str() { - "dev" => (Profile::default_dev(), "default_dev()"), - "release" => (Profile::default_release(), "default_release()"), - _ => (Profile::default(), "default()"), - }; - [debug_the_fields( - name - opt_level - lto - root - codegen_backend - codegen_units - debuginfo - split_debuginfo - debug_assertions - overflow_checks - rpath - incremental - panic - strip - )] - } - } -} - -impl fmt::Display for Profile { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "Profile({})", self.name) - } -} - -impl hash::Hash for Profile { - fn hash(&self, state: &mut H) - where - H: hash::Hasher, - { - self.comparable().hash(state); - } -} - -impl cmp::PartialEq for Profile { - fn eq(&self, other: &Self) -> bool { - self.comparable() == other.comparable() - } -} - -impl Profile { - fn default_dev() -> Profile { - Profile { - name: InternedString::new("dev"), - root: ProfileRoot::Debug, - debuginfo: Some(2), - debug_assertions: true, - overflow_checks: true, - incremental: true, - ..Profile::default() - } - } - - fn default_release() -> Profile { - Profile { - name: InternedString::new("release"), - root: ProfileRoot::Release, - opt_level: InternedString::new("3"), - ..Profile::default() - } - } - - // NOTE: Remove the following three once `named_profiles` is default: - - fn default_test() -> Profile { - Profile { - name: InternedString::new("test"), - ..Profile::default_dev() - } - } - - fn default_bench() -> Profile { - Profile { - name: InternedString::new("bench"), - ..Profile::default_release() - } - } - - fn default_doc() -> Profile { - Profile { - name: InternedString::new("doc"), - ..Profile::default_dev() - } - } - - /// Compares all fields except `name`, which doesn't affect compilation. - /// This is necessary for `Unit` deduplication for things like "test" and - /// "dev" which are essentially the same. 
- fn comparable(&self) -> impl Hash + Eq { - ( - self.opt_level, - self.lto, - self.codegen_backend, - self.codegen_units, - self.debuginfo, - self.split_debuginfo, - self.debug_assertions, - self.overflow_checks, - self.rpath, - self.incremental, - self.panic, - self.strip, - ) - } -} - -/// The link-time-optimization setting. -#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] -pub enum Lto { - /// Explicitly no LTO, disables thin-LTO. - Off, - /// True = "Fat" LTO - /// False = rustc default (no args), currently "thin LTO" - Bool(bool), - /// Named LTO settings like "thin". - Named(InternedString), -} - -impl serde::ser::Serialize for Lto { - fn serialize(&self, s: S) -> Result - where - S: serde::ser::Serializer, - { - match self { - Lto::Off => "off".serialize(s), - Lto::Bool(b) => b.to_string().serialize(s), - Lto::Named(n) => n.serialize(s), - } - } -} - -/// The `panic` setting. -#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord, serde::Serialize)] -#[serde(rename_all = "lowercase")] -pub enum PanicStrategy { - Unwind, - Abort, -} - -impl fmt::Display for PanicStrategy { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - PanicStrategy::Unwind => "unwind", - PanicStrategy::Abort => "abort", - } - .fmt(f) - } -} - -/// The setting for choosing which symbols to strip -#[derive( - Clone, Copy, PartialEq, Eq, Debug, Hash, PartialOrd, Ord, serde::Serialize, serde::Deserialize, -)] -#[serde(rename_all = "lowercase")] -pub enum Strip { - /// Don't remove any symbols - None, - /// Named Strip settings - Named(InternedString), -} - -impl fmt::Display for Strip { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - Strip::None => "none", - Strip::Named(s) => s.as_str(), - } - .fmt(f) - } -} - -/// Flags used in creating `Unit`s to indicate the purpose for the target, and -/// to ensure the target's dependencies have the correct settings. -#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] -pub struct UnitFor { - /// A target for `build.rs` or any of its dependencies, or a proc-macro or - /// any of its dependencies. This enables `build-override` profiles for - /// these targets. - /// - /// An invariant is that if `host_features` is true, `host` must be true. - /// - /// Note that this is `true` for `RunCustomBuild` units, even though that - /// unit should *not* use build-override profiles. This is a bit of a - /// special case. When computing the `RunCustomBuild` unit, it manually - /// uses the `get_profile_run_custom_build` method to get the correct - /// profile information for the unit. `host` needs to be true so that all - /// of the dependencies of that `RunCustomBuild` unit have this flag be - /// sticky (and forced to `true` for all further dependencies) โ€” which is - /// the whole point of `UnitFor`. - host: bool, - /// A target for a build dependency or proc-macro (or any of its - /// dependencies). This is used for computing features of build - /// dependencies and proc-macros independently of other dependency kinds. - /// - /// The subtle difference between this and `host` is that the build script - /// for a non-host package sets this to `false` because it wants the - /// features of the non-host package (whereas `host` is true because the - /// build script is being built for the host). `host_features` becomes - /// `true` for build-dependencies or proc-macros, or any of their - /// dependencies. 
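`comparable()` above deliberately leaves `name` out, and `Hash`/`PartialEq` delegate to it, which is what lets profiles such as `dev` and `test` that differ only by name share a `Unit`. The same pattern in a minimal, runnable form (toy `Named` type, not cargo's `Profile`):

```rust
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

struct Named {
    name: &'static str,
    opt_level: u32,
    debug: bool,
}

impl Named {
    // Everything except `name`, which does not affect compilation.
    fn comparable(&self) -> (u32, bool) {
        (self.opt_level, self.debug)
    }
}

impl PartialEq for Named {
    fn eq(&self, other: &Self) -> bool {
        self.comparable() == other.comparable()
    }
}
impl Eq for Named {}
impl Hash for Named {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.comparable().hash(state)
    }
}

fn main() {
    let dev = Named { name: "dev", opt_level: 0, debug: true };
    let test = Named { name: "test", opt_level: 0, debug: true };
    println!("inserting `{}` and `{}`", dev.name, test.name);
    let mut set = HashSet::new();
    set.insert(dev);
    set.insert(test);
    // Same settings, different names: only one entry survives.
    assert_eq!(set.len(), 1);
}
```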
For example, with this dependency tree: - /// - /// ```text - /// foo - /// โ”œโ”€โ”€ foo build.rs - /// โ”‚ โ””โ”€โ”€ shared_dep (BUILD dependency) - /// โ”‚ โ””โ”€โ”€ shared_dep build.rs - /// โ””โ”€โ”€ shared_dep (Normal dependency) - /// โ””โ”€โ”€ shared_dep build.rs - /// ``` - /// - /// In this example, `foo build.rs` is HOST=true, HOST_FEATURES=false. - /// This is so that `foo build.rs` gets the profile settings for build - /// scripts (HOST=true) and features of foo (HOST_FEATURES=false) because - /// build scripts need to know which features their package is being built - /// with. - /// - /// But in the case of `shared_dep`, when built as a build dependency, - /// both flags are true (it only wants the build-dependency features). - /// When `shared_dep` is built as a normal dependency, then `shared_dep - /// build.rs` is HOST=true, HOST_FEATURES=false for the same reasons that - /// foo's build script is set that way. - host_features: bool, - /// How Cargo processes the `panic` setting or profiles. This is done to - /// handle test/benches inheriting from dev/release, as well as forcing - /// `for_host` units to always unwind. - panic_setting: PanicSetting, -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] -enum PanicSetting { - /// Used to force a unit to always be compiled with the `panic=unwind` - /// strategy, notably for build scripts, proc macros, etc. - AlwaysUnwind, - - /// Indicates that this unit will read its `profile` setting and use - /// whatever is configured there. - ReadProfile, - - /// This unit will ignore its `panic` setting in its profile and will - /// instead inherit it from the `dev` or `release` profile, as appropriate. - Inherit, -} - -impl UnitFor { - /// A unit for a normal target/dependency (i.e., not custom build, - /// proc macro/plugin, or test/bench). - pub fn new_normal() -> UnitFor { - UnitFor { - host: false, - host_features: false, - panic_setting: PanicSetting::ReadProfile, - } - } - - /// A unit for a custom build script or proc-macro or its dependencies. - /// - /// The `host_features` parameter is whether or not this is for a build - /// dependency or proc-macro (something that requires being built "on the - /// host"). Build scripts for non-host units should use `false` because - /// they want to use the features of the package they are running for. - pub fn new_host(host_features: bool) -> UnitFor { - UnitFor { - host: true, - host_features, - // Force build scripts to always use `panic=unwind` for now to - // maximally share dependencies with procedural macros. - panic_setting: PanicSetting::AlwaysUnwind, - } - } - - /// A unit for a compiler plugin or their dependencies. - pub fn new_compiler() -> UnitFor { - UnitFor { - host: false, - // The feature resolver doesn't know which dependencies are - // plugins, so for now plugins don't split features. Since plugins - // are mostly deprecated, just leave this as false. - host_features: false, - // Force plugins to use `panic=abort` so panics in the compiler do - // not abort the process but instead end with a reasonable error - // message that involves catching the panic in the compiler. - panic_setting: PanicSetting::AlwaysUnwind, - } - } - - /// A unit for a test/bench target or their dependencies. - /// - /// Note that `config` is taken here for unstable CLI features to detect - /// whether `panic=abort` is supported for tests. Historical versions of - /// rustc did not support this, but newer versions do with an unstable - /// compiler flag. 
- pub fn new_test(config: &Config) -> UnitFor { - UnitFor { - host: false, - host_features: false, - // We're testing out an unstable feature (`-Zpanic-abort-tests`) - // which inherits the panic setting from the dev/release profile - // (basically avoid recompiles) but historical defaults required - // that we always unwound. - panic_setting: if config.cli_unstable().panic_abort_tests { - PanicSetting::Inherit - } else { - PanicSetting::AlwaysUnwind - }, - } - } - - /// This is a special case for unit tests of a proc-macro. - /// - /// Proc-macro unit tests are forced to be run on the host. - pub fn new_host_test(config: &Config) -> UnitFor { - let mut unit_for = UnitFor::new_test(config); - unit_for.host = true; - unit_for.host_features = true; - unit_for - } - - /// Returns a new copy updated based on the target dependency. - /// - /// This is where the magic happens that the host/host_features settings - /// transition in a sticky fashion. As the dependency graph is being - /// built, once those flags are set, they stay set for the duration of - /// that portion of tree. - pub fn with_dependency(self, parent: &Unit, dep_target: &Target) -> UnitFor { - // A build script or proc-macro transitions this to being built for the host. - let dep_for_host = dep_target.for_host(); - // This is where feature decoupling of host versus target happens. - // - // Once host features are desired, they are always desired. - // - // A proc-macro should always use host features. - // - // Dependencies of a build script should use host features (subtle - // point: the build script itself does *not* use host features, that's - // why the parent is checked here, and not the dependency). - let host_features = - self.host_features || parent.target.is_custom_build() || dep_target.proc_macro(); - // Build scripts and proc macros, and all of their dependencies are - // AlwaysUnwind. - let panic_setting = if dep_for_host { - PanicSetting::AlwaysUnwind - } else { - self.panic_setting - }; - UnitFor { - host: self.host || dep_for_host, - host_features, - panic_setting, - } - } - - /// Returns `true` if this unit is for a build script or any of its - /// dependencies, or a proc macro or any of its dependencies. - pub fn is_for_host(&self) -> bool { - self.host - } - - pub fn is_for_host_features(&self) -> bool { - self.host_features - } - - /// Returns how `panic` settings should be handled for this profile - fn panic_setting(&self) -> PanicSetting { - self.panic_setting - } - - /// All possible values, used by `clean`. - pub fn all_values() -> &'static [UnitFor] { - static ALL: &[UnitFor] = &[ - UnitFor { - host: false, - host_features: false, - panic_setting: PanicSetting::ReadProfile, - }, - UnitFor { - host: true, - host_features: false, - panic_setting: PanicSetting::AlwaysUnwind, - }, - UnitFor { - host: false, - host_features: false, - panic_setting: PanicSetting::AlwaysUnwind, - }, - UnitFor { - host: false, - host_features: false, - panic_setting: PanicSetting::Inherit, - }, - // host_features=true must always have host=true - // `Inherit` is not used in build dependencies. - UnitFor { - host: true, - host_features: true, - panic_setting: PanicSetting::ReadProfile, - }, - UnitFor { - host: true, - host_features: true, - panic_setting: PanicSetting::AlwaysUnwind, - }, - ]; - ALL - } - - pub(crate) fn map_to_features_for(&self) -> FeaturesFor { - FeaturesFor::from_for_host(self.is_for_host_features()) - } -} - -/// Takes the manifest profiles, and overlays the config profiles on-top. 
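`with_dependency` above makes the `host` flag sticky: once a unit is built for the host, everything beneath it in the dependency graph is too. A toy sketch of that propagation, modeling only the `host` flag:

```rust
#[derive(Clone, Copy)]
struct ToyUnitFor {
    host: bool,
}

impl ToyUnitFor {
    fn new_normal() -> Self {
        ToyUnitFor { host: false }
    }

    // A build script or proc-macro dependency flips `host` on, and once on
    // it stays on for everything underneath it.
    fn with_dependency(self, dep_is_for_host: bool) -> Self {
        ToyUnitFor {
            host: self.host || dep_is_for_host,
        }
    }
}

fn main() {
    // foo -> foo's build.rs -> an ordinary dependency of the build script
    let foo = ToyUnitFor::new_normal();
    let build_rs = foo.with_dependency(true);
    let dep_of_build_rs = build_rs.with_dependency(false);
    assert!(!foo.host);
    assert!(build_rs.host);
    // Sticky: still a host unit even though the dependency itself is ordinary.
    assert!(dep_of_build_rs.host);
}
```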
-/// -/// Returns a new copy of the profile map with all the mergers complete. -fn merge_config_profiles( - ws: &Workspace<'_>, - requested_profile: InternedString, -) -> CargoResult> { - let mut profiles = match ws.profiles() { - Some(profiles) => profiles.get_all().clone(), - None => BTreeMap::new(), - }; - // Set of profile names to check if defined in config only. - let mut check_to_add = HashSet::new(); - check_to_add.insert(requested_profile); - // Merge config onto manifest profiles. - for (name, profile) in &mut profiles { - if let Some(config_profile) = get_config_profile(ws, name)? { - profile.merge(&config_profile); - } - if let Some(inherits) = &profile.inherits { - check_to_add.insert(*inherits); - } - } - // Add the built-in profiles. This is important for things like `cargo - // test` which implicitly use the "dev" profile for dependencies. - for name in &["dev", "release", "test", "bench"] { - check_to_add.insert(InternedString::new(name)); - } - // Add config-only profiles. - // Need to iterate repeatedly to get all the inherits values. - let mut current = HashSet::new(); - while !check_to_add.is_empty() { - std::mem::swap(&mut current, &mut check_to_add); - for name in current.drain() { - if !profiles.contains_key(&name) { - if let Some(config_profile) = get_config_profile(ws, &name)? { - if let Some(inherits) = &config_profile.inherits { - check_to_add.insert(*inherits); - } - profiles.insert(name, config_profile); - } - } - } - } - Ok(profiles) -} - -/// Helper for fetching a profile from config. -fn get_config_profile(ws: &Workspace<'_>, name: &str) -> CargoResult> { - let profile: Option> = - ws.config().get(&format!("profile.{}", name))?; - let profile = match profile { - Some(profile) => profile, - None => return Ok(None), - }; - let mut warnings = Vec::new(); - profile - .val - .validate(name, ws.unstable_features(), &mut warnings) - .with_context(|| { - format!( - "config profile `{}` is not valid (defined in `{}`)", - name, profile.definition - ) - })?; - for warning in warnings { - ws.config().shell().warn(warning)?; - } - Ok(Some(profile.val)) -} - -/// Validate that a package does not match multiple package override specs. -/// -/// For example `[profile.dev.package.bar]` and `[profile.dev.package."bar:0.5.0"]` -/// would both match `bar:0.5.0` which would be ambiguous. -fn validate_packages_unique( - resolve: &Resolve, - name: &str, - toml: &Option, -) -> CargoResult> { - let toml = match toml { - Some(ref toml) => toml, - None => return Ok(HashSet::new()), - }; - let overrides = match toml.package.as_ref() { - Some(overrides) => overrides, - None => return Ok(HashSet::new()), - }; - // Verify that a package doesn't match multiple spec overrides. - let mut found = HashSet::new(); - for pkg_id in resolve.iter() { - let matches: Vec<&PackageIdSpec> = overrides - .keys() - .filter_map(|key| match *key { - ProfilePackageSpec::All => None, - ProfilePackageSpec::Spec(ref spec) => { - if spec.matches(pkg_id) { - Some(spec) - } else { - None - } - } - }) - .collect(); - match matches.len() { - 0 => {} - 1 => { - found.insert(matches[0].clone()); - } - _ => { - let specs = matches - .iter() - .map(|spec| spec.to_string()) - .collect::>() - .join(", "); - bail!( - "multiple package overrides in profile `{}` match package `{}`\n\ - found package specs: {}", - name, - pkg_id, - specs - ); - } - } - } - Ok(found) -} - -/// Check for any profile override specs that do not match any known packages. -/// -/// This helps check for typos and mistakes. 
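The loop in `merge_config_profiles` above has to chase `inherits` chains through profiles that exist only in config, which is why it repeatedly swaps two sets until nothing new turns up. A self-contained sketch of that worklist, with a hypothetical `config_profile` lookup standing in for reading `.cargo/config`:

```rust
use std::collections::{BTreeMap, HashSet};

struct ToyProfile {
    inherits: Option<&'static str>,
}

// Hypothetical stand-in for "look this profile up in .cargo/config".
fn config_profile(name: &str) -> Option<ToyProfile> {
    match name {
        "fast-dev" => Some(ToyProfile { inherits: Some("local-base") }),
        "local-base" => Some(ToyProfile { inherits: None }),
        _ => None,
    }
}

fn main() {
    let mut profiles: BTreeMap<&'static str, ToyProfile> = BTreeMap::new();
    let mut check_to_add: HashSet<&'static str> = HashSet::from(["fast-dev"]);

    // Keep resolving until no new `inherits` target shows up.
    let mut current = HashSet::new();
    while !check_to_add.is_empty() {
        std::mem::swap(&mut current, &mut check_to_add);
        for name in current.drain() {
            if !profiles.contains_key(name) {
                if let Some(profile) = config_profile(name) {
                    if let Some(parent) = profile.inherits {
                        check_to_add.insert(parent);
                    }
                    profiles.insert(name, profile);
                }
            }
        }
    }
    // Both the requested profile and its config-only parent are now present.
    assert!(profiles.contains_key("fast-dev"));
    assert!(profiles.contains_key("local-base"));
}
```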
-fn validate_packages_unmatched( - shell: &mut Shell, - resolve: &Resolve, - name: &str, - toml: &TomlProfile, - found: &HashSet, -) -> CargoResult<()> { - let overrides = match toml.package.as_ref() { - Some(overrides) => overrides, - None => return Ok(()), - }; - - // Verify every override matches at least one package. - let missing_specs = overrides.keys().filter_map(|key| { - if let ProfilePackageSpec::Spec(ref spec) = *key { - if !found.contains(spec) { - return Some(spec); - } - } - None - }); - for spec in missing_specs { - // See if there is an exact name match. - let name_matches: Vec = resolve - .iter() - .filter_map(|pkg_id| { - if pkg_id.name() == spec.name() { - Some(pkg_id.to_string()) - } else { - None - } - }) - .collect(); - if name_matches.is_empty() { - let suggestion = closest_msg(&spec.name(), resolve.iter(), |p| p.name().as_str()); - shell.warn(format!( - "profile package spec `{}` in profile `{}` did not match any packages{}", - spec, name, suggestion - ))?; - } else { - shell.warn(format!( - "profile package spec `{}` in profile `{}` \ - has a version or URL that does not match any of the packages: {}", - spec, - name, - name_matches.join(", ") - ))?; - } - } - Ok(()) -} - -/// Returns `true` if a string is a toggle that turns an option off. -fn is_off(s: &str) -> bool { - matches!(s, "off" | "n" | "no" | "none") -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/registry.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/registry.rs deleted file mode 100644 index 47a117967..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/registry.rs +++ /dev/null @@ -1,884 +0,0 @@ -use std::collections::{HashMap, HashSet}; - -use crate::core::PackageSet; -use crate::core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary}; -use crate::sources::config::SourceConfigMap; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::{profile, CanonicalUrl, Config}; -use anyhow::{bail, Context as _}; -use log::{debug, trace}; -use url::Url; - -/// Source of information about a group of packages. -/// -/// See also `core::Source`. -pub trait Registry { - /// Attempt to find the packages that match a dependency request. - fn query( - &mut self, - dep: &Dependency, - f: &mut dyn FnMut(Summary), - fuzzy: bool, - ) -> CargoResult<()>; - - fn query_vec(&mut self, dep: &Dependency, fuzzy: bool) -> CargoResult> { - let mut ret = Vec::new(); - self.query(dep, &mut |s| ret.push(s), fuzzy)?; - Ok(ret) - } - - fn describe_source(&self, source: SourceId) -> String; - fn is_replaced(&self, source: SourceId) -> bool; -} - -/// This structure represents a registry of known packages. It internally -/// contains a number of `Box` instances which are used to load a -/// `Package` from. -/// -/// The resolution phase of Cargo uses this to drive knowledge about new -/// packages as well as querying for lists of new packages. It is here that -/// sources are updated (e.g., network operations) and overrides are -/// handled. -/// -/// The general idea behind this registry is that it is centered around the -/// `SourceMap` structure, contained within which is a mapping of a `SourceId` to -/// a `Source`. Each `Source` in the map has been updated (using network -/// operations if necessary) and is ready to be queried for packages. 
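The `Registry` trait above streams matches to a caller-supplied callback and layers `query_vec` on top as a collecting convenience. A minimal sketch of that trait shape (`ToyRegistry`, `ToySummary`, and `InMemory` are illustrative names, not cargo types):

```rust
#[derive(Clone, Debug, PartialEq)]
struct ToySummary(String);

trait ToyRegistry {
    // Core entry point: stream matches to a callback.
    fn query(&mut self, name: &str, f: &mut dyn FnMut(ToySummary));

    // Convenience wrapper that collects the streamed matches.
    fn query_vec(&mut self, name: &str) -> Vec<ToySummary> {
        let mut out = Vec::new();
        self.query(name, &mut |s| out.push(s));
        out
    }
}

struct InMemory(Vec<ToySummary>);

impl ToyRegistry for InMemory {
    fn query(&mut self, name: &str, f: &mut dyn FnMut(ToySummary)) {
        for s in &self.0 {
            if s.0.starts_with(name) {
                f(s.clone());
            }
        }
    }
}

fn main() {
    let mut reg = InMemory(vec![ToySummary("serde 1.0".into()), ToySummary("log 0.4".into())]);
    assert_eq!(reg.query_vec("serde"), vec![ToySummary("serde 1.0".into())]);
}
```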
-pub struct PackageRegistry<'cfg> { - config: &'cfg Config, - sources: SourceMap<'cfg>, - - // A list of sources which are considered "overrides" which take precedent - // when querying for packages. - overrides: Vec, - - // Note that each SourceId does not take into account its `precise` field - // when hashing or testing for equality. When adding a new `SourceId`, we - // want to avoid duplicates in the `SourceMap` (to prevent re-updating the - // same git repo twice for example), but we also want to ensure that the - // loaded source is always updated. - // - // Sources with a `precise` field normally don't need to be updated because - // their contents are already on disk, but sources without a `precise` field - // almost always need to be updated. If we have a cached `Source` for a - // precise `SourceId`, then when we add a new `SourceId` that is not precise - // we want to ensure that the underlying source is updated. - // - // This is basically a long-winded way of saying that we want to know - // precisely what the keys of `sources` are, so this is a mapping of key to - // what exactly the key is. - source_ids: HashMap, - - locked: LockedMap, - yanked_whitelist: HashSet, - source_config: SourceConfigMap<'cfg>, - - patches: HashMap>, - patches_locked: bool, - patches_available: HashMap>, -} - -/// A map of all "locked packages" which is filled in when parsing a lock file -/// and is used to guide dependency resolution by altering summaries as they're -/// queried from this source. -/// -/// This map can be thought of as a glorified `Vec` where `MySummary` -/// has a `PackageId` for which package it represents as well as a list of -/// `PackageId` for the resolved dependencies. The hash map is otherwise -/// structured though for easy access throughout this registry. -type LockedMap = HashMap< - // The first level of key-ing done in this hash map is the source that - // dependencies come from, identified by a `SourceId`. - // The next level is keyed by the name of the package... - (SourceId, InternedString), - // ... and the value here is a list of tuples. The first element of each - // tuple is a package which has the source/name used to get to this - // point. The second element of each tuple is the list of locked - // dependencies that the first element has. - Vec<(PackageId, Vec)>, ->; - -#[derive(PartialEq, Eq, Clone, Copy)] -enum Kind { - Override, - Locked, - Normal, -} - -/// Argument to `PackageRegistry::patch` which is information about a `[patch]` -/// directive that we found in a lockfile, if present. -pub struct LockedPatchDependency { - /// The original `Dependency` directive, except "locked" so it's version - /// requirement is `=foo` and its `SourceId` has a "precise" listed. - pub dependency: Dependency, - /// The `PackageId` that was previously found in a lock file which - /// `dependency` matches. - pub package_id: PackageId, - /// Something only used for backwards compatibility with the v2 lock file - /// format where `branch=master` is considered the same as `DefaultBranch`. - /// For more comments on this see the code in `ops/resolve.rs`. 
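`LockedMap` above keys on `(SourceId, package name)` and stores every locked id together with the dependency list it was locked against. A toy version of that shape and the lookup that locking relies on, using strings in place of `SourceId`/`PackageId`:

```rust
use std::collections::HashMap;

// (source, package name) -> [(locked package id, the dependency ids it was locked with)]
type ToyLockedMap = HashMap<(String, String), Vec<(String, Vec<String>)>>;

fn register_lock(map: &mut ToyLockedMap, source: &str, name: &str, id: &str, deps: Vec<String>) {
    map.entry((source.to_string(), name.to_string()))
        .or_insert_with(Vec::new)
        .push((id.to_string(), deps));
}

// Look up the dependency list that was previously locked for exactly this id.
fn locked_deps<'a>(
    map: &'a ToyLockedMap,
    source: &str,
    name: &str,
    id: &str,
) -> Option<&'a [String]> {
    map.get(&(source.to_string(), name.to_string()))?
        .iter()
        .find(|(locked_id, _)| locked_id.as_str() == id)
        .map(|(_, deps)| deps.as_slice())
}

fn main() {
    let mut map = ToyLockedMap::new();
    register_lock(
        &mut map,
        "crates-io",
        "log",
        "log 0.4.14",
        vec!["cfg-if 1.0.0".to_string()],
    );
    let deps = locked_deps(&map, "crates-io", "log", "log 0.4.14").unwrap();
    assert_eq!(deps, &["cfg-if 1.0.0".to_string()][..]);
}
```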
- pub alt_package_id: Option, -} - -impl<'cfg> PackageRegistry<'cfg> { - pub fn new(config: &'cfg Config) -> CargoResult> { - let source_config = SourceConfigMap::new(config)?; - Ok(PackageRegistry { - config, - sources: SourceMap::new(), - source_ids: HashMap::new(), - overrides: Vec::new(), - source_config, - locked: HashMap::new(), - yanked_whitelist: HashSet::new(), - patches: HashMap::new(), - patches_locked: false, - patches_available: HashMap::new(), - }) - } - - pub fn get(self, package_ids: &[PackageId]) -> CargoResult> { - trace!("getting packages; sources={}", self.sources.len()); - PackageSet::new(package_ids, self.sources, self.config) - } - - fn ensure_loaded(&mut self, namespace: SourceId, kind: Kind) -> CargoResult<()> { - match self.source_ids.get(&namespace) { - // We've previously loaded this source, and we've already locked it, - // so we're not allowed to change it even if `namespace` has a - // slightly different precise version listed. - Some((_, Kind::Locked)) => { - debug!("load/locked {}", namespace); - return Ok(()); - } - - // If the previous source was not a precise source, then we can be - // sure that it's already been updated if we've already loaded it. - Some((previous, _)) if previous.precise().is_none() => { - debug!("load/precise {}", namespace); - return Ok(()); - } - - // If the previous source has the same precise version as we do, - // then we're done, otherwise we need to need to move forward - // updating this source. - Some((previous, _)) => { - if previous.precise() == namespace.precise() { - debug!("load/match {}", namespace); - return Ok(()); - } - debug!("load/mismatch {}", namespace); - } - None => { - debug!("load/missing {}", namespace); - } - } - - self.load(namespace, kind)?; - Ok(()) - } - - pub fn add_sources(&mut self, ids: impl IntoIterator) -> CargoResult<()> { - for id in ids { - self.ensure_loaded(id, Kind::Locked)?; - } - Ok(()) - } - - pub fn add_preloaded(&mut self, source: Box) { - self.add_source(source, Kind::Locked); - } - - fn add_source(&mut self, source: Box, kind: Kind) { - let id = source.source_id(); - self.sources.insert(source); - self.source_ids.insert(id, (id, kind)); - } - - pub fn add_override(&mut self, source: Box) { - self.overrides.push(source.source_id()); - self.add_source(source, Kind::Override); - } - - pub fn add_to_yanked_whitelist(&mut self, iter: impl Iterator) { - let pkgs = iter.collect::>(); - for (_, source) in self.sources.sources_mut() { - source.add_to_yanked_whitelist(&pkgs); - } - self.yanked_whitelist.extend(pkgs); - } - - /// remove all residual state from previous lock files. - pub fn clear_lock(&mut self) { - trace!("clear_lock"); - self.locked = HashMap::new(); - } - - pub fn register_lock(&mut self, id: PackageId, deps: Vec) { - trace!("register_lock: {}", id); - for dep in deps.iter() { - trace!("\t-> {}", dep); - } - let sub_vec = self - .locked - .entry((id.source_id(), id.name())) - .or_insert_with(Vec::new); - sub_vec.push((id, deps)); - } - - /// Insert a `[patch]` section into this registry. - /// - /// This method will insert a `[patch]` section for the `url` specified, - /// with the given list of dependencies. The `url` specified is the URL of - /// the source to patch (for example this is `crates-io` in the manifest). - /// The `deps` is an array of all the entries in the `[patch]` section of - /// the manifest. - /// - /// Here the `deps` will be resolved to a precise version and stored - /// internally for future calls to `query` below. 
`deps` should be a tuple - /// where the first element is the patch definition straight from the - /// manifest, and the second element is an optional variant where the - /// patch has been locked. This locked patch is the patch locked to - /// a specific version found in Cargo.lock. This will be `None` if - /// `Cargo.lock` doesn't exist, or the patch did not match any existing - /// entries in `Cargo.lock`. - /// - /// Note that the patch list specified here *will not* be available to - /// `query` until `lock_patches` is called below, which should be called - /// once all patches have been added. - /// - /// The return value is a `Vec` of patches that should *not* be locked. - /// This happens when the patch is locked, but the patch has been updated - /// so the locked value is no longer correct. - pub fn patch( - &mut self, - url: &Url, - deps: &[(&Dependency, Option)], - ) -> CargoResult> { - // NOTE: None of this code is aware of required features. If a patch - // is missing a required feature, you end up with an "unused patch" - // warning, which is very hard to understand. Ideally the warning - // would be tailored to indicate *why* it is unused. - let canonical = CanonicalUrl::new(url)?; - - // Return value of patches that shouldn't be locked. - let mut unlock_patches = Vec::new(); - - // First up we need to actually resolve each `deps` specification to - // precisely one summary. We're not using the `query` method below as it - // internally uses maps we're building up as part of this method - // (`patches_available` and `patches). Instead we're going straight to - // the source to load information from it. - // - // Remember that each dependency listed in `[patch]` has to resolve to - // precisely one package, so that's why we're just creating a flat list - // of summaries which should be the same length as `deps` above. - let unlocked_summaries = deps - .iter() - .map(|(orig_patch, locked)| { - // Remove double reference in orig_patch. Is there maybe a - // magic pattern that could avoid this? - let orig_patch = *orig_patch; - // Use the locked patch if it exists, otherwise use the original. - let dep = match locked { - Some(lock) => &lock.dependency, - None => orig_patch, - }; - debug!( - "registering a patch for `{}` with `{}`", - url, - dep.package_name() - ); - - if dep.features().len() != 0 || !dep.uses_default_features() { - self.source_config.config().shell().warn(format!( - "patch for `{}` uses the features mechanism. \ - default-features and features will not take effect because the patch dependency does not support this mechanism", - dep.package_name() - ))?; - } - - // Go straight to the source for resolving `dep`. Load it as we - // normally would and then ask it directly for the list of summaries - // corresponding to this `dep`. 
- self.ensure_loaded(dep.source_id(), Kind::Normal) - .with_context(|| { - format!( - "failed to load source for dependency `{}`", - dep.package_name() - ) - })?; - - let source = self - .sources - .get_mut(dep.source_id()) - .expect("loaded source not present"); - let summaries = source.query_vec(dep)?; - let (summary, should_unlock) = summary_for_patch( - orig_patch, locked, summaries, source, - ) - .with_context(|| { - format!( - "patch for `{}` in `{}` failed to resolve", - orig_patch.package_name(), - url, - ) - })?; - debug!( - "patch summary is {:?} should_unlock={:?}", - summary, should_unlock - ); - if let Some(unlock_id) = should_unlock { - unlock_patches.push((orig_patch.clone(), unlock_id)); - } - - if *summary.package_id().source_id().canonical_url() == canonical { - anyhow::bail!( - "patch for `{}` in `{}` points to the same source, but \ - patches must point to different sources", - dep.package_name(), - url - ); - } - Ok(summary) - }) - .collect::>>() - .with_context(|| format!("failed to resolve patches for `{}`", url))?; - - let mut name_and_version = HashSet::new(); - for summary in unlocked_summaries.iter() { - let name = summary.package_id().name(); - let version = summary.package_id().version(); - if !name_and_version.insert((name, version)) { - bail!( - "cannot have two `[patch]` entries which both resolve \ - to `{} v{}`", - name, - version - ); - } - } - - // Calculate a list of all patches available for this source which is - // then used later during calls to `lock` to rewrite summaries to point - // directly at these patched entries. - // - // Note that this is somewhat subtle where the list of `ids` for a - // canonical URL is extend with possibly two ids per summary. This is done - // to handle the transition from the v2->v3 lock file format where in - // v2 DefeaultBranch was either DefaultBranch or Branch("master") for - // git dependencies. In this case if `summary.package_id()` is - // Branch("master") then alt_package_id will be DefaultBranch. This - // signifies that there's a patch available for either of those - // dependency directives if we see them in the dependency graph. - // - // This is a bit complicated and hopefully an edge case we can remove - // in the future, but for now it hopefully doesn't cause too much - // harm... - let mut ids = Vec::new(); - for (summary, (_, lock)) in unlocked_summaries.iter().zip(deps) { - ids.push(summary.package_id()); - if let Some(lock) = lock { - ids.extend(lock.alt_package_id); - } - } - self.patches_available.insert(canonical.clone(), ids); - - // Note that we do not use `lock` here to lock summaries! That step - // happens later once `lock_patches` is invoked. In the meantime though - // we want to fill in the `patches_available` map (later used in the - // `lock` method) and otherwise store the unlocked summaries in - // `patches` to get locked in a future call to `lock_patches`. - self.patches.insert(canonical, unlocked_summaries); - - Ok(unlock_patches) - } - - /// Lock all patch summaries added via `patch`, making them available to - /// resolution via `query`. - /// - /// This function will internally `lock` each summary added via `patch` - /// above now that the full set of `patch` packages are known. This'll allow - /// us to correctly resolve overridden dependencies between patches - /// hopefully! 
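One of the checks in `patch()` above is that no two `[patch]` entries resolve to the same package name and version, implemented as a `HashSet` insert that fails on the first duplicate. The same check in isolation:

```rust
use std::collections::HashSet;

// Returns the first duplicated (name, version) pair, if any.
fn find_duplicate_patch<'a>(resolved: &'a [(&'a str, &'a str)]) -> Option<(&'a str, &'a str)> {
    let mut seen = HashSet::new();
    for &(name, version) in resolved {
        if !seen.insert((name, version)) {
            return Some((name, version));
        }
    }
    None
}

fn main() {
    let ok = [("serde", "1.0.0"), ("serde", "1.0.1")];
    let dup = [("serde", "1.0.0"), ("serde", "1.0.0")];
    assert_eq!(find_duplicate_patch(&ok), None);
    assert_eq!(find_duplicate_patch(&dup), Some(("serde", "1.0.0")));
}
```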
- pub fn lock_patches(&mut self) { - assert!(!self.patches_locked); - for summaries in self.patches.values_mut() { - for summary in summaries { - debug!("locking patch {:?}", summary); - *summary = lock(&self.locked, &self.patches_available, summary.clone()); - } - } - self.patches_locked = true; - } - - /// Gets all patches grouped by the source URLS they are going to patch. - /// - /// These patches are mainly collected from [`patch`](Self::patch). - /// They might not be the same as patches actually used during dependency resolving. - pub fn patches(&self) -> &HashMap> { - &self.patches - } - - fn load(&mut self, source_id: SourceId, kind: Kind) -> CargoResult<()> { - (|| { - debug!("loading source {}", source_id); - let source = self.source_config.load(source_id, &self.yanked_whitelist)?; - assert_eq!(source.source_id(), source_id); - - if kind == Kind::Override { - self.overrides.push(source_id); - } - self.add_source(source, kind); - - // Ensure the source has fetched all necessary remote data. - let _p = profile::start(format!("updating: {}", source_id)); - self.sources.get_mut(source_id).unwrap().update() - })() - .with_context(|| format!("Unable to update {}", source_id))?; - Ok(()) - } - - fn query_overrides(&mut self, dep: &Dependency) -> CargoResult> { - for &s in self.overrides.iter() { - let src = self.sources.get_mut(s).unwrap(); - let dep = Dependency::new_override(dep.package_name(), s); - let mut results = src.query_vec(&dep)?; - if !results.is_empty() { - return Ok(Some(results.remove(0))); - } - } - Ok(None) - } - - /// This function is used to transform a summary to another locked summary - /// if possible. This is where the concept of a lock file comes into play. - /// - /// If a summary points at a package ID which was previously locked, then we - /// override the summary's ID itself, as well as all dependencies, to be - /// rewritten to the locked versions. This will transform the summary's - /// source to a precise source (listed in the locked version) as well as - /// transforming all of the dependencies from range requirements on - /// imprecise sources to exact requirements on precise sources. - /// - /// If a summary does not point at a package ID which was previously locked, - /// or if any dependencies were added and don't have a previously listed - /// version, we still want to avoid updating as many dependencies as - /// possible to keep the graph stable. In this case we map all of the - /// summary's dependencies to be rewritten to a locked version wherever - /// possible. If we're unable to map a dependency though, we just pass it on - /// through. - pub fn lock(&self, summary: Summary) -> Summary { - assert!(self.patches_locked); - lock(&self.locked, &self.patches_available, summary) - } - - fn warn_bad_override( - &self, - override_summary: &Summary, - real_summary: &Summary, - ) -> CargoResult<()> { - let mut real_deps = real_summary.dependencies().iter().collect::>(); - - let boilerplate = "\ -This is currently allowed but is known to produce buggy behavior with spurious -recompiles and changes to the crate graph. Path overrides unfortunately were -never intended to support this feature, so for now this message is just a -warning. In the future, however, this message will become a hard error. - -To change the dependency graph via an override it's recommended to use the -`[patch]` feature of Cargo instead of the path override feature. This is -documented online at the url below for more information. 
- -https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html -"; - - for dep in override_summary.dependencies() { - if let Some(i) = real_deps.iter().position(|d| dep == *d) { - real_deps.remove(i); - continue; - } - let msg = format!( - "path override for crate `{}` has altered the original list of\n\ - dependencies; the dependency on `{}` was either added or\n\ - modified to not match the previously resolved version\n\n\ - {}", - override_summary.package_id().name(), - dep.package_name(), - boilerplate - ); - self.source_config.config().shell().warn(&msg)?; - return Ok(()); - } - - if let Some(dep) = real_deps.get(0) { - let msg = format!( - "path override for crate `{}` has altered the original list of\n\ - dependencies; the dependency on `{}` was removed\n\n\ - {}", - override_summary.package_id().name(), - dep.package_name(), - boilerplate - ); - self.source_config.config().shell().warn(&msg)?; - return Ok(()); - } - - Ok(()) - } -} - -impl<'cfg> Registry for PackageRegistry<'cfg> { - fn query( - &mut self, - dep: &Dependency, - f: &mut dyn FnMut(Summary), - fuzzy: bool, - ) -> CargoResult<()> { - assert!(self.patches_locked); - let (override_summary, n, to_warn) = { - // Look for an override and get ready to query the real source. - let override_summary = self.query_overrides(dep)?; - - // Next up on our list of candidates is to check the `[patch]` - // section of the manifest. Here we look through all patches - // relevant to the source that `dep` points to, and then we match - // name/version. Note that we don't use `dep.matches(..)` because - // the patches, by definition, come from a different source. - // This means that `dep.matches(..)` will always return false, when - // what we really care about is the name/version match. - let mut patches = Vec::::new(); - if let Some(extra) = self.patches.get(dep.source_id().canonical_url()) { - patches.extend( - extra - .iter() - .filter(|s| dep.matches_ignoring_source(s.package_id())) - .cloned(), - ); - } - - // A crucial feature of the `[patch]` feature is that we *don't* - // query the actual registry if we have a "locked" dependency. A - // locked dep basically just means a version constraint of `=a.b.c`, - // and because patches take priority over the actual source then if - // we have a candidate we're done. - if patches.len() == 1 && dep.is_locked() { - let patch = patches.remove(0); - match override_summary { - Some(summary) => (summary, 1, Some(patch)), - None => { - f(patch); - return Ok(()); - } - } - } else { - if !patches.is_empty() { - debug!( - "found {} patches with an unlocked dep on `{}` at {} \ - with `{}`, \ - looking at sources", - patches.len(), - dep.package_name(), - dep.source_id(), - dep.version_req() - ); - } - - // Ensure the requested source_id is loaded - self.ensure_loaded(dep.source_id(), Kind::Normal) - .with_context(|| { - format!( - "failed to load source for dependency `{}`", - dep.package_name() - ) - })?; - - let source = self.sources.get_mut(dep.source_id()); - match (override_summary, source) { - (Some(_), None) => anyhow::bail!("override found but no real ones"), - (None, None) => return Ok(()), - - // If we don't have an override then we just ship - // everything upstairs after locking the summary - (None, Some(source)) => { - for patch in patches.iter() { - f(patch.clone()); - } - - // Our sources shouldn't ever come back to us with two - // summaries that have the same version. 
We could, - // however, have an `[patch]` section which is in use - // to override a version in the registry. This means - // that if our `summary` in this loop has the same - // version as something in `patches` that we've - // already selected, then we skip this `summary`. - let locked = &self.locked; - let all_patches = &self.patches_available; - let callback = &mut |summary: Summary| { - for patch in patches.iter() { - let patch = patch.package_id().version(); - if summary.package_id().version() == patch { - return; - } - } - f(lock(locked, all_patches, summary)) - }; - return if fuzzy { - source.fuzzy_query(dep, callback) - } else { - source.query(dep, callback) - }; - } - - // If we have an override summary then we query the source - // to sanity check its results. We don't actually use any of - // the summaries it gives us though. - (Some(override_summary), Some(source)) => { - if !patches.is_empty() { - anyhow::bail!("found patches and a path override") - } - let mut n = 0; - let mut to_warn = None; - { - let callback = &mut |summary| { - n += 1; - to_warn = Some(summary); - }; - if fuzzy { - source.fuzzy_query(dep, callback)?; - } else { - source.query(dep, callback)?; - } - } - (override_summary, n, to_warn) - } - } - } - }; - - if n > 1 { - anyhow::bail!("found an override with a non-locked list"); - } else if let Some(summary) = to_warn { - self.warn_bad_override(&override_summary, &summary)?; - } - f(self.lock(override_summary)); - Ok(()) - } - - fn describe_source(&self, id: SourceId) -> String { - match self.sources.get(id) { - Some(src) => src.describe(), - None => id.to_string(), - } - } - - fn is_replaced(&self, id: SourceId) -> bool { - match self.sources.get(id) { - Some(src) => src.is_replaced(), - None => false, - } - } -} - -fn lock( - locked: &LockedMap, - patches: &HashMap>, - summary: Summary, -) -> Summary { - let pair = locked - .get(&(summary.source_id(), summary.name())) - .and_then(|vec| vec.iter().find(|&&(id, _)| id == summary.package_id())); - - trace!("locking summary of {}", summary.package_id()); - - // Lock the summary's ID if possible - let summary = match pair { - Some((precise, _)) => summary.override_id(*precise), - None => summary, - }; - summary.map_dependencies(|dep| { - trace!( - "\t{}/{}/{}", - dep.package_name(), - dep.version_req(), - dep.source_id() - ); - - // If we've got a known set of overrides for this summary, then - // one of a few cases can arise: - // - // 1. We have a lock entry for this dependency from the same - // source as it's listed as coming from. In this case we make - // sure to lock to precisely the given package ID. - // - // 2. We have a lock entry for this dependency, but it's from a - // different source than what's listed, or the version - // requirement has changed. In this case we must discard the - // locked version because the dependency needs to be - // re-resolved. - // - // 3. We have a lock entry for this dependency, but it's from a - // different source than what's listed. This lock though happens - // through `[patch]`, so we want to preserve it. - // - // 4. We don't have a lock entry for this dependency, in which - // case it was likely an optional dependency which wasn't - // included previously so we just pass it through anyway. - // - // Cases 1/2 are handled by `matches_id`, case 3 is handled specially, - // and case 4 is handled by falling through to the logic below. 
- if let Some((_, locked_deps)) = pair { - let locked = locked_deps.iter().find(|&&id| { - // If the dependency matches the package id exactly then we've - // found a match, this is the id the dependency was previously - // locked to. - if dep.matches_id(id) { - return true; - } - - // If the name/version doesn't match, then we definitely don't - // have a match whatsoever. Otherwise we need to check - // `[patch]`... - if !dep.matches_ignoring_source(id) { - return false; - } - - // ... so here we look up the dependency url in the patches - // map, and we see if `id` is contained in the list of patches - // for that url. If it is then this lock is still valid, - // otherwise the lock is no longer valid. - match patches.get(dep.source_id().canonical_url()) { - Some(list) => list.contains(&id), - None => false, - } - }); - - if let Some(&locked) = locked { - trace!("\tfirst hit on {}", locked); - let mut dep = dep; - - // If we found a locked version where the sources match, then - // we can `lock_to` to get an exact lock on this dependency. - // Otherwise we got a lock via `[patch]` so we only lock the - // version requirement, not the source. - if locked.source_id() == dep.source_id() { - dep.lock_to(locked); - } else { - dep.lock_version(locked.version()); - } - return dep; - } - } - - // If this dependency did not have a locked version, then we query - // all known locked packages to see if they match this dependency. - // If anything does then we lock it to that and move on. - let v = locked - .get(&(dep.source_id(), dep.package_name())) - .and_then(|vec| vec.iter().find(|&&(id, _)| dep.matches_id(id))); - if let Some(&(id, _)) = v { - trace!("\tsecond hit on {}", id); - let mut dep = dep; - dep.lock_to(id); - return dep; - } - - trace!("\tnope, unlocked"); - dep - }) -} - -/// This is a helper for selecting the summary, or generating a helpful error message. -fn summary_for_patch( - orig_patch: &Dependency, - locked: &Option, - mut summaries: Vec, - source: &mut dyn Source, -) -> CargoResult<(Summary, Option)> { - if summaries.len() == 1 { - return Ok((summaries.pop().unwrap(), None)); - } - if summaries.len() > 1 { - // TODO: In the future, it might be nice to add all of these - // candidates so that version selection would just pick the - // appropriate one. However, as this is currently structured, if we - // added these all as patches, the unselected versions would end up in - // the "unused patch" listing, and trigger a warning. It might take a - // fair bit of restructuring to make that work cleanly, and there - // isn't any demand at this time to support that. - let mut vers: Vec<_> = summaries.iter().map(|summary| summary.version()).collect(); - vers.sort(); - let versions: Vec<_> = vers.into_iter().map(|v| v.to_string()).collect(); - anyhow::bail!( - "patch for `{}` in `{}` resolved to more than one candidate\n\ - Found versions: {}\n\ - Update the patch definition to select only one package.\n\ - For example, add an `=` version requirement to the patch definition, \ - such as `version = \"={}\"`.", - orig_patch.package_name(), - orig_patch.source_id(), - versions.join(", "), - versions.last().unwrap() - ); - } - assert!(summaries.is_empty()); - // No summaries found, try to help the user figure out what is wrong. - if let Some(locked) = locked { - // Since the locked patch did not match anything, try the unlocked one. 
- let orig_matches = source.query_vec(orig_patch).unwrap_or_else(|e| { - log::warn!( - "could not determine unlocked summaries for dep {:?}: {:?}", - orig_patch, - e - ); - Vec::new() - }); - let (summary, _) = summary_for_patch(orig_patch, &None, orig_matches, source)?; - // The unlocked version found a match. This returns a value to - // indicate that this entry should be unlocked. - return Ok((summary, Some(locked.package_id))); - } - // Try checking if there are *any* packages that match this by name. - let name_only_dep = Dependency::new_override(orig_patch.package_name(), orig_patch.source_id()); - let name_summaries = source.query_vec(&name_only_dep).unwrap_or_else(|e| { - log::warn!( - "failed to do name-only summary query for {:?}: {:?}", - name_only_dep, - e - ); - Vec::new() - }); - let mut vers = name_summaries - .iter() - .map(|summary| summary.version()) - .collect::>(); - let found = match vers.len() { - 0 => format!(""), - 1 => format!("version `{}`", vers[0]), - _ => { - vers.sort(); - let strs: Vec<_> = vers.into_iter().map(|v| v.to_string()).collect(); - format!("versions `{}`", strs.join(", ")) - } - }; - if found.is_empty() { - anyhow::bail!( - "The patch location `{}` does not appear to contain any packages \ - matching the name `{}`.", - orig_patch.source_id(), - orig_patch.package_name() - ); - } else { - anyhow::bail!( - "The patch location `{}` contains a `{}` package with {}, but the patch \ - definition requires `{}`.\n\ - Check that the version in the patch location is what you expect, \ - and update the patch definition to match.", - orig_patch.source_id(), - orig_patch.package_name(), - found, - orig_patch.version_req() - ); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/conflict_cache.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/conflict_cache.rs deleted file mode 100644 index 10c41761d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/conflict_cache.rs +++ /dev/null @@ -1,225 +0,0 @@ -use std::collections::{BTreeMap, HashMap, HashSet}; - -use log::trace; - -use super::types::ConflictMap; -use crate::core::resolver::Context; -use crate::core::{Dependency, PackageId}; - -/// This is a trie for storing a large number of sets designed to -/// efficiently see if any of the stored sets are a subset of a search set. -enum ConflictStoreTrie { - /// One of the stored sets. - Leaf(ConflictMap), - /// A map from an element to a subtrie where - /// all the sets in the subtrie contains that element. - Node(BTreeMap), -} - -impl ConflictStoreTrie { - /// Finds any known set of conflicts, if any, - /// where all elements return some from `is_active` and contain `PackageId` specified. - /// If more than one are activated, then it will return - /// one that will allow for the most jump-back. - fn find( - &self, - is_active: &impl Fn(PackageId) -> Option, - must_contain: Option, - mut max_age: usize, - ) -> Option<(&ConflictMap, usize)> { - match self { - ConflictStoreTrie::Leaf(c) => { - if must_contain.is_none() { - Some((c, 0)) - } else { - // We did not find `must_contain`, so we need to keep looking. - None - } - } - ConflictStoreTrie::Node(m) => { - let mut out = None; - for (&pid, store) in must_contain - .map(|f| m.range(..=f)) - .unwrap_or_else(|| m.range(..)) - { - // If the key is active, then we need to check all of the corresponding subtrie. 
- if let Some(age_this) = is_active(pid) { - if age_this >= max_age && must_contain != Some(pid) { - // not worth looking at, it is to old. - continue; - } - if let Some((o, age_o)) = - store.find(is_active, must_contain.filter(|&f| f != pid), max_age) - { - let age = if must_contain == Some(pid) { - // all the results will include `must_contain` - // so the age of must_contain is not relevant to find the best result. - age_o - } else { - std::cmp::max(age_this, age_o) - }; - if max_age > age { - // we found one that can jump-back further so replace the out. - out = Some((o, age)); - // and don't look at anything older - max_age = age - } - } - } - // Else, if it is not active then there is no way any of the corresponding - // subtrie will be conflicting. - } - out - } - } - } - - fn insert(&mut self, mut iter: impl Iterator, con: ConflictMap) { - if let Some(pid) = iter.next() { - if let ConflictStoreTrie::Node(p) = self { - p.entry(pid) - .or_insert_with(|| ConflictStoreTrie::Node(BTreeMap::new())) - .insert(iter, con); - } - // Else, we already have a subset of this in the `ConflictStore`. - } else { - // We are at the end of the set we are adding, there are three cases for what to do - // next: - // 1. `self` is an empty dummy Node inserted by `or_insert_with` - // in witch case we should replace it with `Leaf(con)`. - // 2. `self` is a `Node` because we previously inserted a superset of - // the thing we are working on (I don't know if this happens in practice) - // but the subset that we are working on will - // always match any time the larger set would have - // in witch case we can replace it with `Leaf(con)`. - // 3. `self` is a `Leaf` that is in the same spot in the structure as - // the thing we are working on. So it is equivalent. - // We can replace it with `Leaf(con)`. - if cfg!(debug_assertions) { - if let ConflictStoreTrie::Leaf(c) = self { - let a: Vec<_> = con.keys().collect(); - let b: Vec<_> = c.keys().collect(); - assert_eq!(a, b); - } - } - *self = ConflictStoreTrie::Leaf(con) - } - } -} - -pub(super) struct ConflictCache { - // `con_from_dep` is a cache of the reasons for each time we - // backtrack. For example after several backtracks we may have: - // - // con_from_dep[`foo = "^1.0.2"`] = map!{ - // `foo=1.0.1`: map!{`foo=1.0.1`: Semver}, - // `foo=1.0.0`: map!{`foo=1.0.0`: Semver}, - // }; - // - // This can be read as "we cannot find a candidate for dep `foo = "^1.0.2"` - // if either `foo=1.0.1` OR `foo=1.0.0` are activated". - // - // Another example after several backtracks we may have: - // - // con_from_dep[`foo = ">=0.8.2, <=0.9.3"`] = map!{ - // `foo=0.8.1`: map!{ - // `foo=0.9.4`: map!{`foo=0.8.1`: Semver, `foo=0.9.4`: Semver}, - // } - // }; - // - // This can be read as "we cannot find a candidate for dep `foo = ">=0.8.2, - // <=0.9.3"` if both `foo=0.8.1` AND `foo=0.9.4` are activated". - // - // This is used to make sure we don't queue work we know will fail. See the - // discussion in https://github.com/rust-lang/cargo/pull/5168 for why this - // is so important. The nested HashMaps act as a kind of btree, that lets us - // look up which entries are still active without - // linearly scanning through the full list. - // - // Also, as a final note, this map is **not** ever removed from. This remains - // as a global cache which we never delete from. Any entry in this map is - // unconditionally true regardless of our resolution history of how we got - // here. - con_from_dep: HashMap, - // `dep_from_pid` is an inverse-index of `con_from_dep`. 
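`con_from_dep` above records sets of activations that are known to make a dependency unresolvable; a lookup asks whether any stored set is fully activated right now. The trie only makes that subset query cheaper; the semantics are the naive scan sketched here:

```rust
use std::collections::HashSet;

type Pkg = &'static str;

// Naive stand-in for the conflict store: each entry is a set of activations
// that is known to make some dependency unresolvable.
fn find_conflict<'a>(
    stored: &'a [HashSet<Pkg>],
    is_active: impl Fn(Pkg) -> bool,
) -> Option<&'a HashSet<Pkg>> {
    stored.iter().find(|set| set.iter().all(|&p| is_active(p)))
}

fn main() {
    let stored = vec![
        HashSet::from(["foo 1.0.0"]),
        HashSet::from(["foo 0.8.1", "foo 0.9.4"]),
    ];
    let active = HashSet::from(["foo 0.8.1", "foo 0.9.4"]);
    // Every member of the second stored set is active, so that conflict applies.
    let hit = find_conflict(&stored, |p| active.contains(p)).unwrap();
    assert!(hit.contains("foo 0.9.4"));
    // The first set never matched, since "foo 1.0.0" is not active.
    assert!(!hit.contains("foo 1.0.0"));
}
```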
- // For every `PackageId` this lists the `Dependency`s that mention it in `dep_from_pid`. - dep_from_pid: HashMap>, -} - -impl ConflictCache { - pub fn new() -> ConflictCache { - ConflictCache { - con_from_dep: HashMap::new(), - dep_from_pid: HashMap::new(), - } - } - pub fn find( - &self, - dep: &Dependency, - is_active: &impl Fn(PackageId) -> Option, - must_contain: Option, - max_age: usize, - ) -> Option<&ConflictMap> { - self.con_from_dep - .get(dep)? - .find(is_active, must_contain, max_age) - .map(|(c, _)| c) - } - /// Finds any known set of conflicts, if any, - /// which are activated in `cx` and contain `PackageId` specified. - /// If more than one are activated, then it will return - /// one that will allow for the most jump-back. - pub fn find_conflicting( - &self, - cx: &Context, - dep: &Dependency, - must_contain: Option, - ) -> Option<&ConflictMap> { - let out = self.find(dep, &|id| cx.is_active(id), must_contain, usize::MAX); - if cfg!(debug_assertions) { - if let Some(c) = &out { - assert!(cx.is_conflicting(None, c).is_some()); - if let Some(f) = must_contain { - assert!(c.contains_key(&f)); - } - } - } - out - } - pub fn conflicting(&self, cx: &Context, dep: &Dependency) -> Option<&ConflictMap> { - self.find_conflicting(cx, dep, None) - } - - /// Adds to the cache a conflict of the form: - /// `dep` is known to be unresolvable if - /// all the `PackageId` entries are activated. - pub fn insert(&mut self, dep: &Dependency, con: &ConflictMap) { - if con.values().any(|c| c.is_public_dependency()) { - // TODO: needs more info for back jumping - // for now refuse to cache it. - return; - } - self.con_from_dep - .entry(dep.clone()) - .or_insert_with(|| ConflictStoreTrie::Node(BTreeMap::new())) - .insert(con.keys().cloned(), con.clone()); - - trace!( - "{} = \"{}\" adding a skip {:?}", - dep.package_name(), - dep.version_req(), - con - ); - - for c in con.keys() { - self.dep_from_pid - .entry(*c) - .or_insert_with(HashSet::new) - .insert(dep.clone()); - } - } - - pub fn dependencies_conflicting_with(&self, pid: PackageId) -> Option<&HashSet> { - self.dep_from_pid.get(&pid) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/context.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/context.rs deleted file mode 100644 index 4854dcde7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/context.rs +++ /dev/null @@ -1,441 +0,0 @@ -use super::dep_cache::RegistryQueryer; -use super::errors::ActivateResult; -use super::types::{ConflictMap, ConflictReason, FeaturesSet, ResolveOpts}; -use super::RequestedFeatures; -use crate::core::{Dependency, PackageId, SourceId, Summary}; -use crate::util::interning::InternedString; -use crate::util::Graph; -use anyhow::format_err; -use log::debug; -use std::collections::HashMap; -use std::num::NonZeroU64; - -pub use super::encode::Metadata; -pub use super::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; -pub use super::resolve::Resolve; - -// A `Context` is basically a bunch of local resolution information which is -// kept around for all `BacktrackFrame` instances. As a result, this runs the -// risk of being cloned *a lot* so we want to make this as cheap to clone as -// possible. 
-#[derive(Clone)] -pub struct Context { - pub age: ContextAge, - pub activations: Activations, - /// list the features that are activated for each package - pub resolve_features: im_rc::HashMap, - /// get the package that will be linking to a native library by its links attribute - pub links: im_rc::HashMap, - /// for each package the list of names it can see, - /// then for each name the exact version that name represents and whether the name is public. - pub public_dependency: Option, - - /// a way to look up for a package in activations what packages required it - /// and all of the exact deps that it fulfilled. - pub parents: Graph>, -} - -/// When backtracking it can be useful to know how far back to go. -/// The `ContextAge` of a `Context` is a monotonically increasing counter of the number -/// of decisions made to get to this state. -/// Several structures store the `ContextAge` when it was added, -/// to be used in `find_candidate` for backtracking. -pub type ContextAge = usize; - -/// Find the activated version of a crate based on the name, source, and semver compatibility. -/// By storing this in a hash map we ensure that there is only one -/// semver compatible version of each crate. -/// This all so stores the `ContextAge`. -pub type ActivationsKey = (InternedString, SourceId, SemverCompatibility); -pub type Activations = im_rc::HashMap; - -/// A type that represents when cargo treats two Versions as compatible. -/// Versions `a` and `b` are compatible if their left-most nonzero digit is the -/// same. -#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug, PartialOrd, Ord)] -pub enum SemverCompatibility { - Major(NonZeroU64), - Minor(NonZeroU64), - Patch(u64), -} - -impl From<&semver::Version> for SemverCompatibility { - fn from(ver: &semver::Version) -> Self { - if let Some(m) = NonZeroU64::new(ver.major) { - return SemverCompatibility::Major(m); - } - if let Some(m) = NonZeroU64::new(ver.minor) { - return SemverCompatibility::Minor(m); - } - SemverCompatibility::Patch(ver.patch) - } -} - -impl PackageId { - pub fn as_activations_key(self) -> ActivationsKey { - (self.name(), self.source_id(), self.version().into()) - } -} - -impl Context { - pub fn new(check_public_visible_dependencies: bool) -> Context { - Context { - age: 0, - resolve_features: im_rc::HashMap::new(), - links: im_rc::HashMap::new(), - public_dependency: if check_public_visible_dependencies { - Some(PublicDependency::new()) - } else { - None - }, - parents: Graph::new(), - activations: im_rc::HashMap::new(), - } - } - - /// Activate this summary by inserting it into our list of known activations. - /// - /// The `parent` passed in here is the parent summary/dependency edge which - /// cased `summary` to get activated. This may not be present for the root - /// crate, for example. - /// - /// Returns `true` if this summary with the given features is already activated. 
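The `SemverCompatibility` rule described above ("compatible if the left-most non-zero digit is the same") is easy to check with a tiny sketch. `CompatBucket` and `bucket` are invented names for illustration, the inputs are plain integers rather than `semver::Version`, and the real type wraps `NonZeroU64`; the bucketing rule itself is the one stated in the deleted doc comment.

```rust
/// The compatibility "bucket" of a version: two versions may coexist in the
/// resolve graph only if their buckets differ (left-most non-zero part).
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
enum CompatBucket {
    Major(u64),
    Minor(u64),
    Patch(u64),
}

fn bucket(major: u64, minor: u64, patch: u64) -> CompatBucket {
    if major != 0 {
        CompatBucket::Major(major)
    } else if minor != 0 {
        CompatBucket::Minor(minor)
    } else {
        CompatBucket::Patch(patch)
    }
}

fn main() {
    // 1.2.3 and 1.9.0 share a bucket, so only one of them may be activated...
    assert_eq!(bucket(1, 2, 3), bucket(1, 9, 0));
    // ...while 0.2.x and 0.3.x are different buckets and can coexist.
    assert_ne!(bucket(0, 2, 1), bucket(0, 3, 0));
    // Below 0.1.0 every patch level is its own bucket.
    assert_ne!(bucket(0, 0, 1), bucket(0, 0, 2));
}
```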
- pub fn flag_activated( - &mut self, - summary: &Summary, - opts: &ResolveOpts, - parent: Option<(&Summary, &Dependency)>, - ) -> ActivateResult { - let id = summary.package_id(); - let age: ContextAge = self.age; - match self.activations.entry(id.as_activations_key()) { - im_rc::hashmap::Entry::Occupied(o) => { - debug_assert_eq!( - &o.get().0, - summary, - "cargo does not allow two semver compatible versions" - ); - } - im_rc::hashmap::Entry::Vacant(v) => { - if let Some(link) = summary.links() { - if self.links.insert(link, id).is_some() { - return Err(format_err!( - "Attempting to resolve a dependency with more than \ - one crate with links={}.\nThis will not build as \ - is. Consider rebuilding the .lock file.", - &*link - ) - .into()); - } - } - v.insert((summary.clone(), age)); - - // If we've got a parent dependency which activated us, *and* - // the dependency has a different source id listed than the - // `summary` itself, then things get interesting. This basically - // means that a `[patch]` was used to augment `dep.source_id()` - // with `summary`. - // - // In this scenario we want to consider the activation key, as - // viewed from the perspective of `dep.source_id()`, as being - // fulfilled. This means that we need to add a second entry in - // the activations map for the source that was patched, in - // addition to the source of the actual `summary` itself. - // - // Without this it would be possible to have both 1.0.0 and - // 1.1.0 "from crates.io" in a dependency graph if one of those - // versions came from a `[patch]` source. - if let Some((_, dep)) = parent { - if dep.source_id() != id.source_id() { - let key = (id.name(), dep.source_id(), id.version().into()); - let prev = self.activations.insert(key, (summary.clone(), age)); - if let Some((previous_summary, _)) = prev { - return Err( - (previous_summary.package_id(), ConflictReason::Semver).into() - ); - } - } - } - - return Ok(false); - } - } - debug!("checking if {} is already activated", summary.package_id()); - match &opts.features { - // This returns `false` for CliFeatures just for simplicity. It - // would take a bit of work to compare since they are not in the - // same format as DepFeatures (and that may be expensive - // performance-wise). Also, it should only occur once for a root - // package. The only drawback is that it may re-activate a root - // package again, which should only affect performance, but that - // should be rare. Cycles should still be detected since those - // will have `DepFeatures` edges. 
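The `[patch]` bookkeeping explained in the comments above (record the activation a second time under the source the dependency originally asked for) can be sketched in isolation. Everything here is simplified and hypothetical: the key really is `(InternedString, SourceId, SemverCompatibility)`, not string-and-integer stand-ins, and the real method also reports a semver conflict if the extra slot was already taken.

```rust
use std::collections::HashMap;

// Hypothetical simplified types for illustration only.
type Name = &'static str;
type Source = &'static str;
type Bucket = u64; // stands in for the semver-compatibility bucket
type ActivationsKey = (Name, Source, Bucket);

fn flag_activated(
    activations: &mut HashMap<ActivationsKey, (Name, Source, u64)>,
    name: Name,
    source: Source,
    major: u64,
    // The source the parent dependency asked for, if a [patch] redirected it.
    requested_source: Option<Source>,
) {
    let bucket = major; // crude bucket: just the major version
    let summary = (name, source, major);
    activations.insert((name, source, bucket), summary);
    if let Some(orig) = requested_source {
        if orig != source {
            // Record the activation under the original source too, so the
            // patched slot counts as fulfilled and no second copy is pulled in.
            activations.insert((name, orig, bucket), summary);
        }
    }
}

fn main() {
    let mut activations = HashMap::new();
    // `foo 1.x` comes from a [patch] pointing at a git source, but the
    // dependency requested crates.io; both keys now map to the same summary.
    flag_activated(&mut activations, "foo", "git", 1, Some("crates-io"));
    assert!(activations.contains_key(&("foo", "crates-io", 1)));
    assert!(activations.contains_key(&("foo", "git", 1)));
}
```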
- RequestedFeatures::CliFeatures(_) => Ok(false), - RequestedFeatures::DepFeatures { - features, - uses_default_features, - } => { - let has_default_feature = summary.features().contains_key("default"); - Ok(match self.resolve_features.get(&id) { - Some(prev) => { - features.is_subset(prev) - && (!uses_default_features - || prev.contains("default") - || !has_default_feature) - } - None => features.is_empty() && (!uses_default_features || !has_default_feature), - }) - } - } - } - - /// If the package is active returns the `ContextAge` when it was added - pub fn is_active(&self, id: PackageId) -> Option { - self.activations - .get(&id.as_activations_key()) - .and_then(|(s, l)| if s.package_id() == id { Some(*l) } else { None }) - } - - /// If the conflict reason on the package still applies returns the `ContextAge` when it was added - pub fn still_applies(&self, id: PackageId, reason: &ConflictReason) -> Option { - self.is_active(id).and_then(|mut max| { - match reason { - ConflictReason::PublicDependency(name) => { - if &id == name { - return Some(max); - } - max = std::cmp::max(max, self.is_active(*name)?); - max = std::cmp::max( - max, - self.public_dependency - .as_ref() - .unwrap() - .can_see_item(*name, id)?, - ); - } - ConflictReason::PubliclyExports(name) => { - if &id == name { - return Some(max); - } - max = std::cmp::max(max, self.is_active(*name)?); - max = std::cmp::max( - max, - self.public_dependency - .as_ref() - .unwrap() - .publicly_exports_item(*name, id)?, - ); - } - _ => {} - } - Some(max) - }) - } - - /// Checks whether all of `parent` and the keys of `conflicting activations` - /// are still active. - /// If so returns the `ContextAge` when the newest one was added. - pub fn is_conflicting( - &self, - parent: Option, - conflicting_activations: &ConflictMap, - ) -> Option { - let mut max = 0; - if let Some(parent) = parent { - max = std::cmp::max(max, self.is_active(parent)?); - } - - for (id, reason) in conflicting_activations.iter() { - max = std::cmp::max(max, self.still_applies(*id, reason)?); - } - Some(max) - } - - pub fn resolve_replacements( - &self, - registry: &RegistryQueryer<'_>, - ) -> HashMap { - self.activations - .values() - .filter_map(|(s, _)| registry.used_replacement_for(s.package_id())) - .collect() - } - - pub fn graph(&self) -> Graph> { - let mut graph: Graph> = Graph::new(); - self.activations - .values() - .for_each(|(r, _)| graph.add(r.package_id())); - for i in self.parents.iter() { - graph.add(*i); - for (o, e) in self.parents.edges(i) { - let old_link = graph.link(*o, *i); - assert!(old_link.is_empty()); - *old_link = e.iter().cloned().collect(); - } - } - graph - } -} - -impl Graph> { - pub fn parents_of(&self, p: PackageId) -> impl Iterator + '_ { - self.edges(&p) - .map(|(grand, d)| (*grand, d.iter().any(|x| x.is_public()))) - } -} - -#[derive(Clone, Debug, Default)] -pub struct PublicDependency { - /// For each active package the set of all the names it can see, - /// for each name the exact package that name resolves to, - /// the `ContextAge` when it was first visible, - /// and the `ContextAge` when it was first exported. 
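The age arithmetic used by `is_active`, `still_applies` and `is_conflicting` above boils down to: a cached conflict is still relevant only if every package it mentions is still active, and the interesting number is the age of the newest such activation (backtracking past it would dissolve the conflict). A hedged, std-only sketch of just that computation, with string ids standing in for `PackageId`:

```rust
use std::collections::HashMap;

type Age = usize;
type PackageId = &'static str;

/// Age at which the *last* of the required packages became active, or None if
/// any of them is no longer active (the cached conflict no longer applies).
fn conflict_age(active: &HashMap<PackageId, Age>, required: &[PackageId]) -> Option<Age> {
    let mut max = 0;
    for pkg in required {
        max = max.max(*active.get(pkg)?); // bail out if anything is inactive
    }
    Some(max)
}

fn main() {
    let active: HashMap<_, _> = [("a", 3), ("b", 7)].into_iter().collect();
    // Backtracking past age 7 would deactivate `b` and make this conflict moot.
    assert_eq!(conflict_age(&active, &["a", "b"]), Some(7));
    assert_eq!(conflict_age(&active, &["a", "c"]), None);
}
```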
- inner: im_rc::HashMap< - PackageId, - im_rc::HashMap)>, - >, -} - -impl PublicDependency { - fn new() -> Self { - PublicDependency { - inner: im_rc::HashMap::new(), - } - } - fn publicly_exports(&self, candidate_pid: PackageId) -> Vec { - self.inner - .get(&candidate_pid) // if we have seen it before - .iter() - .flat_map(|x| x.values()) // all the things we have stored - .filter(|x| x.2.is_some()) // as publicly exported - .map(|x| x.0) - .chain(Some(candidate_pid)) // but even if not we know that everything exports itself - .collect() - } - fn publicly_exports_item( - &self, - candidate_pid: PackageId, - target: PackageId, - ) -> Option { - debug_assert_ne!(candidate_pid, target); - let out = self - .inner - .get(&candidate_pid) - .and_then(|names| names.get(&target.name())) - .filter(|(p, _, _)| *p == target) - .and_then(|(_, _, age)| *age); - debug_assert_eq!( - out.is_some(), - self.publicly_exports(candidate_pid).contains(&target) - ); - out - } - pub fn can_see_item(&self, candidate_pid: PackageId, target: PackageId) -> Option { - self.inner - .get(&candidate_pid) - .and_then(|names| names.get(&target.name())) - .filter(|(p, _, _)| *p == target) - .map(|(_, age, _)| *age) - } - pub fn add_edge( - &mut self, - candidate_pid: PackageId, - parent_pid: PackageId, - is_public: bool, - age: ContextAge, - parents: &Graph>, - ) { - // one tricky part is that `candidate_pid` may already be active and - // have public dependencies of its own. So we not only need to mark - // `candidate_pid` as visible to its parents but also all of its existing - // publicly exported dependencies. - for c in self.publicly_exports(candidate_pid) { - // for each (transitive) parent that can newly see `t` - let mut stack = vec![(parent_pid, is_public)]; - while let Some((p, public)) = stack.pop() { - match self.inner.entry(p).or_default().entry(c.name()) { - im_rc::hashmap::Entry::Occupied(mut o) => { - // the (transitive) parent can already see something by `c`s name, it had better be `c`. - assert_eq!(o.get().0, c); - if o.get().2.is_some() { - // The previous time the parent saw `c`, it was a public dependency. - // So all of its parents already know about `c` - // and we can save some time by stopping now. - continue; - } - if public { - // Mark that `c` has now bean seen publicly - let old_age = o.get().1; - o.insert((c, old_age, if public { Some(age) } else { None })); - } - } - im_rc::hashmap::Entry::Vacant(v) => { - // The (transitive) parent does not have anything by `c`s name, - // so we add `c`. - v.insert((c, age, if public { Some(age) } else { None })); - } - } - // if `candidate_pid` was a private dependency of `p` then `p` parents can't see `c` thru `p` - if public { - // if it was public, then we add all of `p`s parents to be checked - stack.extend(parents.parents_of(p)); - } - } - } - } - pub fn can_add_edge( - &self, - b_id: PackageId, - parent: PackageId, - is_public: bool, - parents: &Graph>, - ) -> Result< - (), - ( - ((PackageId, ConflictReason), (PackageId, ConflictReason)), - Option<(PackageId, ConflictReason)>, - ), - > { - // one tricky part is that `candidate_pid` may already be active and - // have public dependencies of its own. So we not only need to check - // `b_id` as visible to its parents but also all of its existing - // publicly exported dependencies. 
- for t in self.publicly_exports(b_id) { - // for each (transitive) parent that can newly see `t` - let mut stack = vec![(parent, is_public)]; - while let Some((p, public)) = stack.pop() { - // TODO: don't look at the same thing more than once - if let Some(o) = self.inner.get(&p).and_then(|x| x.get(&t.name())) { - if o.0 != t { - // the (transitive) parent can already see a different version by `t`s name. - // So, adding `b` will cause `p` to have a public dependency conflict on `t`. - return Err(( - (o.0, ConflictReason::PublicDependency(p)), // p can see the other version and - (parent, ConflictReason::PublicDependency(p)), // p can see us - )) - .map_err(|e| { - if t == b_id { - (e, None) - } else { - (e, Some((t, ConflictReason::PubliclyExports(b_id)))) - } - }); - } - if o.2.is_some() { - // The previous time the parent saw `t`, it was a public dependency. - // So all of its parents already know about `t` - // and we can save some time by stopping now. - continue; - } - } - // if `b` was a private dependency of `p` then `p` parents can't see `t` thru `p` - if public { - // if it was public, then we add all of `p`s parents to be checked - stack.extend(parents.parents_of(p)); - } - } - } - Ok(()) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/dep_cache.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/dep_cache.rs deleted file mode 100644 index c5bf602a2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/dep_cache.rs +++ /dev/null @@ -1,531 +0,0 @@ -//! There are 2 sources of facts for the resolver: -//! -//! - The `Registry` tells us for a `Dependency` what versions are available to fulfil it. -//! - The `Summary` tells us for a version (and features) what dependencies need to be fulfilled for it to be activated. -//! -//! These constitute immutable facts, the soled ground truth that all other inference depends on. -//! Theoretically this could all be enumerated ahead of time, but we want to be lazy and only -//! look up things we need to. The compromise is to cache the results as they are computed. -//! -//! This module impl that cache in all the gory details - -use crate::core::resolver::context::Context; -use crate::core::resolver::errors::describe_path_in_context; -use crate::core::resolver::types::{ConflictReason, DepInfo, FeaturesSet}; -use crate::core::resolver::{ - ActivateError, ActivateResult, CliFeatures, RequestedFeatures, ResolveOpts, VersionOrdering, - VersionPreferences, -}; -use crate::core::{Dependency, FeatureValue, PackageId, PackageIdSpec, Registry, Summary}; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; - -use anyhow::Context as _; -use log::debug; -use std::collections::{BTreeSet, HashMap, HashSet}; -use std::rc::Rc; - -pub struct RegistryQueryer<'a> { - pub registry: &'a mut (dyn Registry + 'a), - replacements: &'a [(PackageIdSpec, Dependency)], - version_prefs: &'a VersionPreferences, - /// If set the list of dependency candidates will be sorted by minimal - /// versions first. That allows `cargo update -Z minimal-versions` which will - /// specify minimum dependency versions to be used. 
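The public-dependency bookkeeping above keeps, per active package, a map from visible name to the exact package behind it plus the ages at which it became visible and (optionally) publicly re-exported. The sketch below only mirrors that shape; `PublicVisibility` is a made-up name, the real structure uses `im_rc` maps keyed by `PackageId`/`InternedString`, and the edge-propagation along parents is omitted.

```rust
use std::collections::HashMap;

type PackageId = &'static str;
type Age = usize;

/// For every active package: the names it can see, which exact package each
/// name resolves to, when it first became visible, and when (if ever) it
/// became publicly re-exported.
#[derive(Default)]
struct PublicVisibility {
    inner: HashMap<PackageId, HashMap<&'static str, (PackageId, Age, Option<Age>)>>,
}

impl PublicVisibility {
    /// Everything `pkg` re-exports publicly, plus `pkg` itself.
    fn publicly_exports(&self, pkg: PackageId) -> Vec<PackageId> {
        self.inner
            .get(pkg)
            .into_iter()
            .flat_map(|names| names.values())
            .filter(|(_, _, public_age)| public_age.is_some())
            .map(|(id, _, _)| *id)
            .chain(Some(pkg))
            .collect()
    }

    fn record(&mut self, seer: PackageId, name: &'static str, target: PackageId, age: Age, public: bool) {
        self.inner
            .entry(seer)
            .or_default()
            .insert(name, (target, age, public.then_some(age)));
    }
}

fn main() {
    let mut vis = PublicVisibility::default();
    vis.record("app", "serde", "serde 1.0", 2, true);
    vis.record("app", "rand", "rand 0.8", 3, false);
    // Only the public edge is re-exported; `app` always exports itself.
    assert_eq!(vis.publicly_exports("app"), vec!["serde 1.0", "app"]);
}
```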
- minimal_versions: bool, - /// a cache of `Candidate`s that fulfil a `Dependency` - registry_cache: HashMap>>, - /// a cache of `Dependency`s that are required for a `Summary` - summary_cache: HashMap< - (Option, Summary, ResolveOpts), - Rc<(HashSet, Rc>)>, - >, - /// all the cases we ended up using a supplied replacement - used_replacements: HashMap, -} - -impl<'a> RegistryQueryer<'a> { - pub fn new( - registry: &'a mut dyn Registry, - replacements: &'a [(PackageIdSpec, Dependency)], - version_prefs: &'a VersionPreferences, - minimal_versions: bool, - ) -> Self { - RegistryQueryer { - registry, - replacements, - version_prefs, - minimal_versions, - registry_cache: HashMap::new(), - summary_cache: HashMap::new(), - used_replacements: HashMap::new(), - } - } - - pub fn used_replacement_for(&self, p: PackageId) -> Option<(PackageId, PackageId)> { - self.used_replacements.get(&p).map(|r| (p, r.package_id())) - } - - pub fn replacement_summary(&self, p: PackageId) -> Option<&Summary> { - self.used_replacements.get(&p) - } - - /// Queries the `registry` to return a list of candidates for `dep`. - /// - /// This method is the location where overrides are taken into account. If - /// any candidates are returned which match an override then the override is - /// applied by performing a second query for what the override should - /// return. - pub fn query(&mut self, dep: &Dependency) -> CargoResult>> { - if let Some(out) = self.registry_cache.get(dep).cloned() { - return Ok(out); - } - - let mut ret = Vec::new(); - self.registry.query( - dep, - &mut |s| { - ret.push(s); - }, - false, - )?; - for summary in ret.iter_mut() { - let mut potential_matches = self - .replacements - .iter() - .filter(|&&(ref spec, _)| spec.matches(summary.package_id())); - - let &(ref spec, ref dep) = match potential_matches.next() { - None => continue, - Some(replacement) => replacement, - }; - debug!( - "found an override for {} {}", - dep.package_name(), - dep.version_req() - ); - - let mut summaries = self.registry.query_vec(dep, false)?.into_iter(); - let s = summaries.next().ok_or_else(|| { - anyhow::format_err!( - "no matching package for override `{}` found\n\ - location searched: {}\n\ - version required: {}", - spec, - dep.source_id(), - dep.version_req() - ) - })?; - let summaries = summaries.collect::>(); - if !summaries.is_empty() { - let bullets = summaries - .iter() - .map(|s| format!(" * {}", s.package_id())) - .collect::>(); - anyhow::bail!( - "the replacement specification `{}` matched \ - multiple packages:\n * {}\n{}", - spec, - s.package_id(), - bullets.join("\n") - ); - } - - // The dependency should be hard-coded to have the same name and an - // exact version requirement, so both of these assertions should - // never fail. 
- assert_eq!(s.version(), summary.version()); - assert_eq!(s.name(), summary.name()); - - let replace = if s.source_id() == summary.source_id() { - debug!("Preventing\n{:?}\nfrom replacing\n{:?}", summary, s); - None - } else { - Some(s) - }; - let matched_spec = spec.clone(); - - // Make sure no duplicates - if let Some(&(ref spec, _)) = potential_matches.next() { - anyhow::bail!( - "overlapping replacement specifications found:\n\n \ - * {}\n * {}\n\nboth specifications match: {}", - matched_spec, - spec, - summary.package_id() - ); - } - - for dep in summary.dependencies() { - debug!("\t{} => {}", dep.package_name(), dep.version_req()); - } - if let Some(r) = replace { - self.used_replacements.insert(summary.package_id(), r); - } - } - - // When we attempt versions for a package we'll want to do so in a sorted fashion to pick - // the "best candidates" first. VersionPreferences implements this notion. - self.version_prefs.sort_summaries( - &mut ret, - if self.minimal_versions { - VersionOrdering::MinimumVersionsFirst - } else { - VersionOrdering::MaximumVersionsFirst - }, - ); - - let out = Rc::new(ret); - - self.registry_cache.insert(dep.clone(), out.clone()); - - Ok(out) - } - - /// Find out what dependencies will be added by activating `candidate`, - /// with features described in `opts`. Then look up in the `registry` - /// the candidates that will fulfil each of these dependencies, as it is the - /// next obvious question. - pub fn build_deps( - &mut self, - cx: &Context, - parent: Option, - candidate: &Summary, - opts: &ResolveOpts, - ) -> ActivateResult, Rc>)>> { - // if we have calculated a result before, then we can just return it, - // as it is a "pure" query of its arguments. - if let Some(out) = self - .summary_cache - .get(&(parent, candidate.clone(), opts.clone())) - .cloned() - { - return Ok(out); - } - // First, figure out our set of dependencies based on the requested set - // of features. This also calculates what features we're going to enable - // for our own dependencies. - let (used_features, deps) = resolve_features(parent, candidate, opts)?; - - // Next, transform all dependencies into a list of possible candidates - // which can satisfy that dependency. - let mut deps = deps - .into_iter() - .map(|(dep, features)| { - let candidates = self.query(&dep).with_context(|| { - format!( - "failed to get `{}` as a dependency of {}", - dep.package_name(), - describe_path_in_context(cx, &candidate.package_id()), - ) - })?; - Ok((dep, candidates, features)) - }) - .collect::>>()?; - - // Attempt to resolve dependencies with fewer candidates before trying - // dependencies with more candidates. This way if the dependency with - // only one candidate can't be resolved we don't have to do a bunch of - // work before we figure that out. - deps.sort_by_key(|&(_, ref a, _)| a.len()); - - let out = Rc::new((used_features, Rc::new(deps))); - - // If we succeed we add the result to the cache so we can use it again next time. - // We don't cache the failure cases as they don't impl Clone. - self.summary_cache - .insert((parent, candidate.clone(), opts.clone()), out.clone()); - - Ok(out) - } -} - -/// Returns the features we ended up using and -/// all dependencies and the features we want from each of them. -pub fn resolve_features<'b>( - parent: Option, - s: &'b Summary, - opts: &'b ResolveOpts, -) -> ActivateResult<(HashSet, Vec<(Dependency, FeaturesSet)>)> { - // First, filter by dev-dependencies. 
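Two ideas from the deleted `RegistryQueryer`/`build_deps` code are worth isolating: query results are pure functions of the dependency and are therefore cached behind `Rc`, and dependencies are then attempted in order of how few candidates they have, so forced choices fail fast. The sketch below is an illustration under simplified, made-up types (`Queryer`, string deps, integer "versions"), not the cargo API.

```rust
use std::collections::HashMap;
use std::rc::Rc;

type Dep = &'static str;
type Candidate = u64; // e.g. a version number

struct Queryer {
    cache: HashMap<Dep, Rc<Vec<Candidate>>>,
}

impl Queryer {
    /// Pure function of `dep`, so the result can be cached and shared via Rc.
    fn query(&mut self, dep: Dep, registry: &HashMap<Dep, Vec<Candidate>>) -> Rc<Vec<Candidate>> {
        if let Some(hit) = self.cache.get(dep) {
            return Rc::clone(hit);
        }
        let mut candidates = registry.get(dep).cloned().unwrap_or_default();
        // Prefer newest versions first (or oldest, under minimal-versions).
        candidates.sort_unstable_by(|a, b| b.cmp(a));
        let out = Rc::new(candidates);
        self.cache.insert(dep, Rc::clone(&out));
        out
    }
}

fn main() {
    let registry: HashMap<Dep, Vec<Candidate>> =
        [("log", vec![3, 1, 2]), ("serde", vec![7])].into_iter().collect();
    let mut q = Queryer { cache: HashMap::new() };

    // Resolve dependencies with the fewest candidates first: if the forced
    // choice fails we find out before exploring the larger search space.
    let mut deps: Vec<(Dep, Rc<Vec<Candidate>>)> = ["log", "serde"]
        .into_iter()
        .map(|d| (d, q.query(d, &registry)))
        .collect();
    deps.sort_by_key(|(_, cands)| cands.len());

    assert_eq!(deps[0].0, "serde"); // one candidate, try it first
    assert_eq!(*q.query("log", &registry), vec![3, 2, 1]); // cached, newest first
}
```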
- let deps = s.dependencies(); - let deps = deps.iter().filter(|d| d.is_transitive() || opts.dev_deps); - - let reqs = build_requirements(parent, s, opts)?; - let mut ret = Vec::new(); - let default_dep = BTreeSet::new(); - let mut valid_dep_names = HashSet::new(); - - // Next, collect all actually enabled dependencies and their features. - for dep in deps { - // Skip optional dependencies, but not those enabled through a - // feature - if dep.is_optional() && !reqs.deps.contains_key(&dep.name_in_toml()) { - continue; - } - valid_dep_names.insert(dep.name_in_toml()); - // So we want this dependency. Move the features we want from - // `feature_deps` to `ret` and register ourselves as using this - // name. - let mut base = reqs - .deps - .get(&dep.name_in_toml()) - .unwrap_or(&default_dep) - .clone(); - base.extend(dep.features().iter()); - ret.push((dep.clone(), Rc::new(base))); - } - - // This is a special case for command-line `--features - // dep_name/feat_name` where `dep_name` does not exist. All other - // validation is done either in `build_requirements` or - // `build_feature_map`. - if parent.is_none() { - for dep_name in reqs.deps.keys() { - if !valid_dep_names.contains(dep_name) { - let e = RequirementError::MissingDependency(*dep_name); - return Err(e.into_activate_error(parent, s)); - } - } - } - - Ok((reqs.into_features(), ret)) -} - -/// Takes requested features for a single package from the input `ResolveOpts` and -/// recurses to find all requested features, dependencies and requested -/// dependency features in a `Requirements` object, returning it to the resolver. -fn build_requirements<'a, 'b: 'a>( - parent: Option, - s: &'a Summary, - opts: &'b ResolveOpts, -) -> ActivateResult> { - let mut reqs = Requirements::new(s); - - let handle_default = |uses_default_features, reqs: &mut Requirements<'_>| { - if uses_default_features && s.features().contains_key("default") { - if let Err(e) = reqs.require_feature(InternedString::new("default")) { - return Err(e.into_activate_error(parent, s)); - } - } - Ok(()) - }; - - match &opts.features { - RequestedFeatures::CliFeatures(CliFeatures { - features, - all_features, - uses_default_features, - }) => { - if *all_features { - for key in s.features().keys() { - if let Err(e) = reqs.require_feature(*key) { - return Err(e.into_activate_error(parent, s)); - } - } - } else { - for fv in features.iter() { - if let Err(e) = reqs.require_value(fv) { - return Err(e.into_activate_error(parent, s)); - } - } - handle_default(*uses_default_features, &mut reqs)?; - } - } - RequestedFeatures::DepFeatures { - features, - uses_default_features, - } => { - for feature in features.iter() { - if let Err(e) = reqs.require_feature(*feature) { - return Err(e.into_activate_error(parent, s)); - } - } - handle_default(*uses_default_features, &mut reqs)?; - } - } - - Ok(reqs) -} - -/// Set of feature and dependency requirements for a package. -#[derive(Debug)] -struct Requirements<'a> { - summary: &'a Summary, - /// The deps map is a mapping of dependency name to list of features enabled. - /// - /// The resolver will activate all of these dependencies, with the given - /// features enabled. - deps: HashMap>, - /// The set of features enabled on this package which is later used when - /// compiling to instruct the code what features were enabled. - features: HashSet, -} - -/// An error for a requirement. -/// -/// This will later be converted to an `ActivateError` depending on whether or -/// not this is a dependency or a root package. 
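The requirement-building machinery above recursively expands requested features through the feature table, remembers what it has already required, and rejects a feature that names itself directly. A compact sketch of that loop, under the assumption of a flat `&str`-keyed table and a made-up `Requirements` type (the real code also tracks per-dependency feature sets, weak `dep:` features and the `default` feature):

```rust
use std::collections::{HashMap, HashSet};

/// A tiny feature table: feature name -> the features it enables.
type FeatureMap = HashMap<&'static str, Vec<&'static str>>;

#[derive(Default)]
struct Requirements {
    features: HashSet<&'static str>,
}

impl Requirements {
    fn require(&mut self, table: &FeatureMap, feat: &'static str) -> Result<(), String> {
        if !self.features.insert(feat) {
            return Ok(()); // already required, nothing new to do
        }
        let enabled = table
            .get(feat)
            .ok_or_else(|| format!("unknown feature `{feat}`"))?;
        for &f in enabled {
            if f == feat {
                return Err(format!("feature `{feat}` depends on itself"));
            }
            self.require(table, f)?;
        }
        Ok(())
    }
}

fn main() {
    let table: FeatureMap = [
        ("default", vec!["std"]),
        ("std", vec![]),
        ("broken", vec!["broken"]),
    ]
    .into_iter()
    .collect();

    let mut reqs = Requirements::default();
    // `uses_default_features` only matters if a `default` feature actually exists.
    if table.contains_key("default") {
        reqs.require(&table, "default").unwrap();
    }
    assert!(reqs.features.contains("std"));
    assert!(Requirements::default().require(&table, "broken").is_err());
}
```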
-enum RequirementError { - /// The package does not have the requested feature. - MissingFeature(InternedString), - /// The package does not have the requested dependency. - MissingDependency(InternedString), - /// A feature has a direct cycle to itself. - /// - /// Note that cycles through multiple features are allowed (but perhaps - /// they shouldn't be?). - Cycle(InternedString), -} - -impl Requirements<'_> { - fn new(summary: &Summary) -> Requirements<'_> { - Requirements { - summary, - deps: HashMap::new(), - features: HashSet::new(), - } - } - - fn into_features(self) -> HashSet { - self.features - } - - fn require_dep_feature( - &mut self, - package: InternedString, - feat: InternedString, - weak: bool, - ) -> Result<(), RequirementError> { - // If `package` is indeed an optional dependency then we activate the - // feature named `package`, but otherwise if `package` is a required - // dependency then there's no feature associated with it. - if !weak - && self - .summary - .dependencies() - .iter() - .any(|dep| dep.name_in_toml() == package && dep.is_optional()) - { - self.require_feature(package)?; - } - self.deps.entry(package).or_default().insert(feat); - Ok(()) - } - - fn require_dependency(&mut self, pkg: InternedString) { - self.deps.entry(pkg).or_default(); - } - - fn require_feature(&mut self, feat: InternedString) -> Result<(), RequirementError> { - if !self.features.insert(feat) { - // Already seen this feature. - return Ok(()); - } - - let fvs = match self.summary.features().get(&feat) { - Some(fvs) => fvs, - None => return Err(RequirementError::MissingFeature(feat)), - }; - - for fv in fvs { - if let FeatureValue::Feature(dep_feat) = fv { - if *dep_feat == feat { - return Err(RequirementError::Cycle(feat)); - } - } - self.require_value(fv)?; - } - Ok(()) - } - - fn require_value(&mut self, fv: &FeatureValue) -> Result<(), RequirementError> { - match fv { - FeatureValue::Feature(feat) => self.require_feature(*feat)?, - FeatureValue::Dep { dep_name } => self.require_dependency(*dep_name), - FeatureValue::DepFeature { - dep_name, - dep_feature, - // Weak features are always activated in the dependency - // resolver. They will be narrowed inside the new feature - // resolver. - weak, - } => self.require_dep_feature(*dep_name, *dep_feature, *weak)?, - }; - Ok(()) - } -} - -impl RequirementError { - fn into_activate_error(self, parent: Option, summary: &Summary) -> ActivateError { - match self { - RequirementError::MissingFeature(feat) => { - let deps: Vec<_> = summary - .dependencies() - .iter() - .filter(|dep| dep.name_in_toml() == feat) - .collect(); - if deps.is_empty() { - return match parent { - None => ActivateError::Fatal(anyhow::format_err!( - "Package `{}` does not have the feature `{}`", - summary.package_id(), - feat - )), - Some(p) => ActivateError::Conflict( - p, - ConflictReason::MissingFeatures(feat.to_string()), - ), - }; - } - if deps.iter().any(|dep| dep.is_optional()) { - match parent { - None => ActivateError::Fatal(anyhow::format_err!( - "Package `{}` does not have feature `{}`. It has an optional dependency \ - with that name, but that dependency uses the \"dep:\" \ - syntax in the features table, so it does not have an implicit feature with that name.", - summary.package_id(), - feat - )), - Some(p) => ActivateError::Conflict( - p, - ConflictReason::NonImplicitDependencyAsFeature(feat), - ), - } - } else { - match parent { - None => ActivateError::Fatal(anyhow::format_err!( - "Package `{}` does not have feature `{}`. 
It has a required dependency \ - with that name, but only optional dependencies can be used as features.", - summary.package_id(), - feat - )), - Some(p) => ActivateError::Conflict( - p, - ConflictReason::RequiredDependencyAsFeature(feat), - ), - } - } - } - RequirementError::MissingDependency(dep_name) => { - match parent { - None => ActivateError::Fatal(anyhow::format_err!( - "package `{}` does not have a dependency named `{}`", - summary.package_id(), - dep_name - )), - // This code path currently isn't used, since `foo/bar` - // and `dep:` syntax is not allowed in a dependency. - Some(p) => ActivateError::Conflict( - p, - ConflictReason::MissingFeatures(dep_name.to_string()), - ), - } - } - RequirementError::Cycle(feat) => ActivateError::Fatal(anyhow::format_err!( - "cyclic feature dependency: feature `{}` depends on itself", - feat - )), - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/encode.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/encode.rs deleted file mode 100644 index 88d0d8296..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/encode.rs +++ /dev/null @@ -1,719 +0,0 @@ -//! Definition of how to encode a `Resolve` into a TOML `Cargo.lock` file -//! -//! This module contains all machinery necessary to parse a `Resolve` from a -//! `Cargo.lock` as well as serialize a `Resolve` to a `Cargo.lock`. -//! -//! ## Changing `Cargo.lock` -//! -//! In general Cargo is quite conservative about changing the format of -//! `Cargo.lock`. Usage of new features in Cargo can change `Cargo.lock` at any -//! time, but otherwise changing the serialization of `Cargo.lock` is a -//! difficult operation to do that we typically avoid. -//! -//! The main problem with changing the format of `Cargo.lock` is that it can -//! cause quite a bad experience for end users who use different versions of -//! Cargo. If every PR to a project oscillates between the stable channel's -//! encoding of Cargo.lock and the nightly channel's encoding then that's a -//! pretty bad experience. -//! -//! We do, however, want to change `Cargo.lock` over time. (and we have!). To do -//! this the rules that we currently have are: -//! -//! * Add support for the new format to Cargo. This involves code changes in -//! Cargo itself, likely by adding a new variant of `ResolveVersion` and -//! branching on that where necessary. This is accompanied with tests in the -//! `lockfile_compat` module. -//! -//! * Do not update `ResolveVersion::default()`. The new lockfile format will -//! not be used yet. -//! -//! * Preserve the new format if found. This means that if Cargo finds the new -//! version it'll keep using it, but otherwise it continues to use whatever -//! format it previously found. -//! -//! * Wait a "long time". This is at least until the changes here hit stable -//! Rust. Often though we wait a little longer to let the changes percolate -//! into one or two older stable releases. -//! -//! * Change the return value of `ResolveVersion::default()` to the new format. -//! This will cause new lock files to use the latest encoding as well as -//! causing any operation which updates the lock file to update to the new -//! format. -//! -//! This migration scheme in general means that Cargo we'll get *support* for a -//! new format into Cargo ASAP, but it won't be exercised yet (except in Cargo's -//! own tests). Eventually when stable/beta/nightly all have support for the new -//! 
format (and maybe a few previous stable versions) we flip the switch. -//! Projects on nightly will quickly start seeing changes, but -//! stable/beta/nightly will all understand this new format and will preserve -//! it. -//! -//! While this does mean that projects' `Cargo.lock` changes over time, it's -//! typically a pretty minimal effort change that's just "check in what's -//! there". -//! -//! ## Historical changes to `Cargo.lock` -//! -//! Listed from most recent to oldest, these are some of the changes we've made -//! to `Cargo.lock`'s serialization format: -//! -//! * A `version` marker is now at the top of the lock file which is a way for -//! super-old Cargos (at least since this was implemented) to give a formal -//! error if they see a lock file from a super-future Cargo. Additionally as -//! part of this change the encoding of `git` dependencies in lock files -//! changed where `branch = "master"` is now encoded with `branch=master` -//! instead of with nothing at all. -//! -//! * The entries in `dependencies` arrays have been shortened and the -//! `checksum` field now shows up directly in `[[package]]` instead of always -//! at the end of the file. The goal of this change was to ideally reduce -//! merge conflicts being generated on `Cargo.lock`. Updating a version of a -//! package now only updates two lines in the file, the checksum and the -//! version number, most of the time. Dependency edges are specified in a -//! compact form where possible where just the name is listed. The -//! version/source on dependency edges are only listed if necessary to -//! disambiguate which version or which source is in use. -//! -//! * A comment at the top of the file indicates that the file is a generated -//! file and contains the special symbol `@generated` to indicate to common -//! review tools that it's a generated file. -//! -//! * A `[root]` entry for the "root crate" has been removed and instead now -//! included in `[[package]]` like everything else. -//! -//! * All packages from registries contain a `checksum` which is a sha256 -//! checksum of the tarball the package is associated with. This is all stored -//! in the `[metadata]` table of `Cargo.lock` which all versions of Cargo -//! since 1.0 have preserved. The goal of this was to start recording -//! checksums so mirror sources can be verified. -//! -//! ## Other oddities about `Cargo.lock` -//! -//! There's a few other miscellaneous weird things about `Cargo.lock` that you -//! may want to be aware of when reading this file: -//! -//! * All packages have a `source` listed to indicate where they come from. For -//! `path` dependencies, however, no `source` is listed. There's no way we -//! could emit a filesystem path name and have that be portable across -//! systems, so all packages from a `path` are not listed with a `source`. -//! Note that this also means that all packages with `path` sources must have -//! unique names. -//! -//! * The `[metadata]` table in `Cargo.lock` is intended to be a generic mapping -//! of strings to strings that's simply preserved by Cargo. This was a very -//! early effort to be forward compatible against changes to `Cargo.lock`'s -//! format. This is nowadays sort of deemed a bad idea though and we don't -//! really use it that much except for `checksum`s historically. It's not -//! really recommended to use this. -//! -//! * The actual literal on-disk serialiation is found in -//! `src/cargo/ops/lockfile.rs` which basically renders a `toml::Value` in a -//! 
special fashion to make sure we have strict control over the on-disk -//! format. - -use super::{Resolve, ResolveVersion}; -use crate::core::{Dependency, GitReference, Package, PackageId, SourceId, Workspace}; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::{internal, Graph}; -use anyhow::{bail, Context as _}; -use log::debug; -use serde::de; -use serde::ser; -use serde::{Deserialize, Serialize}; -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::fmt; -use std::str::FromStr; - -/// The `Cargo.lock` structure. -#[derive(Serialize, Deserialize, Debug)] -pub struct EncodableResolve { - version: Option, - package: Option>, - /// `root` is optional to allow backward compatibility. - root: Option, - metadata: Option, - #[serde(default, skip_serializing_if = "Patch::is_empty")] - patch: Patch, -} - -#[derive(Serialize, Deserialize, Debug, Default)] -struct Patch { - unused: Vec, -} - -pub type Metadata = BTreeMap; - -impl EncodableResolve { - /// Convert a `Cargo.lock` to a Resolve. - /// - /// Note that this `Resolve` is not "complete". For example, the - /// dependencies do not know the difference between regular/dev/build - /// dependencies, so they are not filled in. It also does not include - /// `features`. Care should be taken when using this Resolve. One of the - /// primary uses is to be used with `resolve_with_previous` to guide the - /// resolver to create a complete Resolve. - pub fn into_resolve(self, original: &str, ws: &Workspace<'_>) -> CargoResult { - let path_deps = build_path_deps(ws)?; - let mut checksums = HashMap::new(); - - let mut version = match self.version { - Some(3) => ResolveVersion::V3, - Some(n) => bail!( - "lock file version `{}` was found, but this version of Cargo \ - does not understand this lock file, perhaps Cargo needs \ - to be updated?", - n, - ), - // Historically Cargo did not have a version indicator in lock - // files, so this could either be the V1 or V2 encoding. We assume - // an older format is being parsed until we see so otherwise. - None => ResolveVersion::V1, - }; - - let packages = { - let mut packages = self.package.unwrap_or_default(); - if let Some(root) = self.root { - packages.insert(0, root); - } - packages - }; - - // `PackageId`s in the lock file don't include the `source` part - // for workspace members, so we reconstruct proper IDs. - let live_pkgs = { - let mut live_pkgs = HashMap::new(); - let mut all_pkgs = HashSet::new(); - for pkg in packages.iter() { - let enc_id = EncodablePackageId { - name: pkg.name.clone(), - version: Some(pkg.version.clone()), - source: pkg.source, - }; - - if !all_pkgs.insert(enc_id.clone()) { - anyhow::bail!("package `{}` is specified twice in the lockfile", pkg.name); - } - let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { - // We failed to find a local package in the workspace. - // It must have been removed and should be ignored. - None => { - debug!("path dependency now missing {} v{}", pkg.name, pkg.version); - continue; - } - Some(&source) => PackageId::new(&pkg.name, &pkg.version, source)?, - }; - - // If a package has a checksum listed directly on it then record - // that here, and we also bump our version up to 2 since V1 - // didn't ever encode this field. 
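The version-detection logic in `into_resolve` above follows a "ratchet up on evidence" pattern: start from the explicit `version` marker (or V1 when there is none), then bump to a newer encoding whenever a newer-format feature such as inline checksums is seen. A minimal sketch of that pattern, with an invented `LockVersion` enum and a boolean standing in for the actual evidence checks (the real code also rejects unknown explicit versions with an error):

```rust
/// Lock-file encoding versions, ordered oldest to newest so that `max` can be
/// used to ratchet up as soon as newer-format evidence is seen while parsing.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum LockVersion {
    V1,
    V2,
    V3,
}

fn detect(explicit_version: Option<u32>, has_inline_checksums: bool) -> LockVersion {
    let mut version = match explicit_version {
        Some(3) => LockVersion::V3,
        // An unrecognised explicit `version` would be a hard error in practice.
        _ => LockVersion::V1,
    };
    if has_inline_checksums {
        // Checksums directly on `[[package]]` entries imply at least V2.
        version = version.max(LockVersion::V2);
    }
    version
}

fn main() {
    assert_eq!(detect(None, false), LockVersion::V1);
    assert_eq!(detect(None, true), LockVersion::V2);
    // An explicit marker always wins and is never downgraded by `max`.
    assert_eq!(detect(Some(3), true), LockVersion::V3);
}
```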
- if let Some(cksum) = &pkg.checksum { - version = version.max(ResolveVersion::V2); - checksums.insert(id, Some(cksum.clone())); - } - - assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none()) - } - live_pkgs - }; - - // When decoding a V2 version the edges in `dependencies` aren't - // guaranteed to have either version or source information. This `map` - // is used to find package ids even if dependencies have missing - // information. This map is from name to version to source to actual - // package ID. (various levels to drill down step by step) - let mut map = HashMap::new(); - for (id, _) in live_pkgs.values() { - map.entry(id.name().as_str()) - .or_insert_with(HashMap::new) - .entry(id.version().to_string()) - .or_insert_with(HashMap::new) - .insert(id.source_id(), *id); - } - - let mut lookup_id = |enc_id: &EncodablePackageId| -> Option { - // The name of this package should always be in the larger list of - // all packages. - let by_version = map.get(enc_id.name.as_str())?; - - // If the version is provided, look that up. Otherwise if the - // version isn't provided this is a V2 manifest and we should only - // have one version for this name. If we have more than one version - // for the name then it's ambiguous which one we'd use. That - // shouldn't ever actually happen but in theory bad git merges could - // produce invalid lock files, so silently ignore these cases. - let by_source = match &enc_id.version { - Some(version) => by_version.get(version)?, - None => { - version = version.max(ResolveVersion::V2); - if by_version.len() == 1 { - by_version.values().next().unwrap() - } else { - return None; - } - } - }; - - // This is basically the same as above. Note though that `source` is - // always missing for path dependencies regardless of serialization - // format. That means we have to handle the `None` case a bit more - // carefully. - match &enc_id.source { - Some(source) => by_source.get(source).cloned(), - None => { - // Look through all possible packages ids for this - // name/version. If there's only one `path` dependency then - // we are hardcoded to use that since `path` dependencies - // can't have a source listed. - let mut path_packages = by_source.values().filter(|p| p.source_id().is_path()); - if let Some(path) = path_packages.next() { - if path_packages.next().is_some() { - return None; - } - Some(*path) - - // ... otherwise if there's only one then we must be - // implicitly using that one due to a V2 serialization of - // the lock file - } else if by_source.len() == 1 { - let id = by_source.values().next().unwrap(); - version = version.max(ResolveVersion::V2); - Some(*id) - - // ... and failing that we probably had a bad git merge of - // `Cargo.lock` or something like that, so just ignore this. 
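The `lookup_id` closure above drills down through a name -> version -> source index and only accepts an abbreviated edge when the remaining choice is unambiguous. The sketch below keeps just that drill-down, with string keys and an integer id in place of `PackageId`, and omits the special handling of `path` sources; it is an illustration of the shape of the lookup, not the cargo code.

```rust
use std::collections::HashMap;

// name -> version -> source -> package id (an integer here, for illustration)
type Index = HashMap<&'static str, HashMap<&'static str, HashMap<&'static str, u32>>>;

/// Resolve a possibly-abbreviated lock-file edge ("name", "name version",
/// or "name version (source)") against the set of live packages.
fn lookup(index: &Index, name: &str, version: Option<&str>, source: Option<&str>) -> Option<u32> {
    let by_version = index.get(name)?;
    let by_source = match version {
        Some(v) => by_version.get(v)?,
        // No version written: only unambiguous if a single version exists.
        None if by_version.len() == 1 => by_version.values().next().unwrap(),
        None => return None,
    };
    match source {
        Some(s) => by_source.get(s).copied(),
        // No source written: again only unambiguous with a single entry.
        None if by_source.len() == 1 => by_source.values().next().copied(),
        None => None,
    }
}

fn main() {
    let mut index: Index = HashMap::new();
    index
        .entry("log")
        .or_default()
        .entry("0.4.14")
        .or_default()
        .insert("registry+https://github.com/rust-lang/crates.io-index", 1);

    // The compact edge "log" is enough, since only one `log` is in the graph.
    assert_eq!(lookup(&index, "log", None, None), Some(1));
    assert_eq!(lookup(&index, "log", Some("0.3.0"), None), None);
}
```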
- } else { - None - } - } - } - }; - - let mut g = Graph::new(); - - for &(ref id, _) in live_pkgs.values() { - g.add(*id); - } - - for &(ref id, pkg) in live_pkgs.values() { - let deps = match pkg.dependencies { - Some(ref deps) => deps, - None => continue, - }; - - for edge in deps.iter() { - if let Some(to_depend_on) = lookup_id(edge) { - g.link(*id, to_depend_on); - } - } - } - - let replacements = { - let mut replacements = HashMap::new(); - for &(ref id, pkg) in live_pkgs.values() { - if let Some(ref replace) = pkg.replace { - assert!(pkg.dependencies.is_none()); - if let Some(replace_id) = lookup_id(replace) { - replacements.insert(*id, replace_id); - } - } - } - replacements - }; - - let mut metadata = self.metadata.unwrap_or_default(); - - // In the V1 serialization formats all checksums were listed in the lock - // file in the `[metadata]` section, so if we're still V1 then look for - // that here. - let prefix = "checksum "; - let mut to_remove = Vec::new(); - for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { - to_remove.push(k.to_string()); - let k = &k[prefix.len()..]; - let enc_id: EncodablePackageId = k - .parse() - .with_context(|| internal("invalid encoding of checksum in lockfile"))?; - let id = match lookup_id(&enc_id) { - Some(id) => id, - _ => continue, - }; - - let v = if v == "" { - None - } else { - Some(v.to_string()) - }; - checksums.insert(id, v); - } - // If `checksum` was listed in `[metadata]` but we were previously - // listed as `V2` then assume some sort of bad git merge happened, so - // discard all checksums and let's regenerate them later. - if !to_remove.is_empty() && version >= ResolveVersion::V2 { - checksums.drain(); - } - for k in to_remove { - metadata.remove(&k); - } - - let mut unused_patches = Vec::new(); - for pkg in self.patch.unused { - let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { - Some(&src) => PackageId::new(&pkg.name, &pkg.version, src)?, - None => continue, - }; - unused_patches.push(id); - } - - // We have a curious issue where in the "v1 format" we buggily had a - // trailing blank line at the end of lock files under some specific - // conditions. - // - // Cargo is trying to write new lockfies in the "v2 format" but if you - // have no dependencies, for example, then the lockfile encoded won't - // really have any indicator that it's in the new format (no - // dependencies or checksums listed). This means that if you type `cargo - // new` followed by `cargo build` it will generate a "v2 format" lock - // file since none previously existed. When reading this on the next - // `cargo build`, however, it generates a new lock file because when - // reading in that lockfile we think it's the v1 format. - // - // To help fix this issue we special case here. If our lockfile only has - // one trailing newline, not two, *and* it only has one package, then - // this is actually the v2 format. - if original.ends_with('\n') - && !original.ends_with("\n\n") - && version == ResolveVersion::V1 - && g.iter().count() == 1 - { - version = ResolveVersion::V2; - } - - Ok(Resolve::new( - g, - replacements, - HashMap::new(), - checksums, - metadata, - unused_patches, - version, - HashMap::new(), - )) - } -} - -fn build_path_deps(ws: &Workspace<'_>) -> CargoResult> { - // If a crate is **not** a path source, then we're probably in a situation - // such as `cargo install` with a lock file from a remote dependency. 
In - // that case we don't need to fixup any path dependencies (as they're not - // actually path dependencies any more), so we ignore them. - let members = ws - .members() - .filter(|p| p.package_id().source_id().is_path()) - .collect::>(); - - let mut ret = HashMap::new(); - let mut visited = HashSet::new(); - for member in members.iter() { - ret.insert( - member.package_id().name().to_string(), - member.package_id().source_id(), - ); - visited.insert(member.package_id().source_id()); - } - for member in members.iter() { - build_pkg(member, ws, &mut ret, &mut visited); - } - for deps in ws.root_patch()?.values() { - for dep in deps { - build_dep(dep, ws, &mut ret, &mut visited); - } - } - for &(_, ref dep) in ws.root_replace() { - build_dep(dep, ws, &mut ret, &mut visited); - } - - return Ok(ret); - - fn build_pkg( - pkg: &Package, - ws: &Workspace<'_>, - ret: &mut HashMap, - visited: &mut HashSet, - ) { - for dep in pkg.dependencies() { - build_dep(dep, ws, ret, visited); - } - } - - fn build_dep( - dep: &Dependency, - ws: &Workspace<'_>, - ret: &mut HashMap, - visited: &mut HashSet, - ) { - let id = dep.source_id(); - if visited.contains(&id) || !id.is_path() { - return; - } - let path = match id.url().to_file_path() { - Ok(p) => p.join("Cargo.toml"), - Err(_) => return, - }; - let pkg = match ws.load(&path) { - Ok(p) => p, - Err(_) => return, - }; - ret.insert(pkg.name().to_string(), pkg.package_id().source_id()); - visited.insert(pkg.package_id().source_id()); - build_pkg(&pkg, ws, ret, visited); - } -} - -impl Patch { - fn is_empty(&self) -> bool { - self.unused.is_empty() - } -} - -#[derive(Serialize, Deserialize, Debug, PartialOrd, Ord, PartialEq, Eq)] -pub struct EncodableDependency { - name: String, - version: String, - source: Option, - checksum: Option, - dependencies: Option>, - replace: Option, -} - -#[derive(Debug, PartialOrd, Ord, PartialEq, Eq, Hash, Clone)] -pub struct EncodablePackageId { - name: String, - version: Option, - source: Option, -} - -impl fmt::Display for EncodablePackageId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.name)?; - if let Some(s) = &self.version { - write!(f, " {}", s)?; - } - if let Some(s) = &self.source { - write!(f, " ({})", s.as_url())?; - } - Ok(()) - } -} - -impl FromStr for EncodablePackageId { - type Err = anyhow::Error; - - fn from_str(s: &str) -> CargoResult { - let mut s = s.splitn(3, ' '); - let name = s.next().unwrap(); - let version = s.next(); - let source_id = match s.next() { - Some(s) => { - if s.starts_with('(') && s.ends_with(')') { - Some(SourceId::from_url(&s[1..s.len() - 1])?) 
- } else { - anyhow::bail!("invalid serialized PackageId") - } - } - None => None, - }; - - Ok(EncodablePackageId { - name: name.to_string(), - version: version.map(|v| v.to_string()), - source: source_id, - }) - } -} - -impl ser::Serialize for EncodablePackageId { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - s.collect_str(self) - } -} - -impl<'de> de::Deserialize<'de> for EncodablePackageId { - fn deserialize(d: D) -> Result - where - D: de::Deserializer<'de>, - { - String::deserialize(d).and_then(|string| { - string - .parse::() - .map_err(de::Error::custom) - }) - } -} - -impl ser::Serialize for Resolve { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - let mut ids: Vec<_> = self.iter().collect(); - ids.sort(); - - let state = EncodeState::new(self); - - let encodable = ids - .iter() - .map(|&id| encodable_resolve_node(id, self, &state)) - .collect::>(); - - let mut metadata = self.metadata().clone(); - - if self.version() == ResolveVersion::V1 { - for &id in ids.iter().filter(|id| !id.source_id().is_path()) { - let checksum = match self.checksums()[&id] { - Some(ref s) => &s[..], - None => "", - }; - let id = encodable_package_id(id, &state, self.version()); - metadata.insert(format!("checksum {}", id.to_string()), checksum.to_string()); - } - } - - let metadata = if metadata.is_empty() { - None - } else { - Some(metadata) - }; - - let patch = Patch { - unused: self - .unused_patches() - .iter() - .map(|id| EncodableDependency { - name: id.name().to_string(), - version: id.version().to_string(), - source: encode_source(id.source_id()), - dependencies: None, - replace: None, - checksum: if self.version() >= ResolveVersion::V2 { - self.checksums().get(id).and_then(|x| x.clone()) - } else { - None - }, - }) - .collect(), - }; - EncodableResolve { - package: Some(encodable), - root: None, - metadata, - patch, - version: match self.version() { - ResolveVersion::V3 => Some(3), - ResolveVersion::V2 | ResolveVersion::V1 => None, - }, - } - .serialize(s) - } -} - -pub struct EncodeState<'a> { - counts: Option>>, -} - -impl<'a> EncodeState<'a> { - pub fn new(resolve: &'a Resolve) -> EncodeState<'a> { - let counts = if resolve.version() >= ResolveVersion::V2 { - let mut map = HashMap::new(); - for id in resolve.iter() { - let slot = map - .entry(id.name()) - .or_insert_with(HashMap::new) - .entry(id.version()) - .or_insert(0); - *slot += 1; - } - Some(map) - } else { - None - }; - EncodeState { counts } - } -} - -fn encodable_resolve_node( - id: PackageId, - resolve: &Resolve, - state: &EncodeState<'_>, -) -> EncodableDependency { - let (replace, deps) = match resolve.replacement(id) { - Some(id) => ( - Some(encodable_package_id(id, state, resolve.version())), - None, - ), - None => { - let mut deps = resolve - .deps_not_replaced(id) - .map(|(id, _)| encodable_package_id(id, state, resolve.version())) - .collect::>(); - deps.sort(); - (None, Some(deps)) - } - }; - - EncodableDependency { - name: id.name().to_string(), - version: id.version().to_string(), - source: encode_source(id.source_id()), - dependencies: deps, - replace, - checksum: if resolve.version() >= ResolveVersion::V2 { - resolve.checksums().get(&id).and_then(|s| s.clone()) - } else { - None - }, - } -} - -pub fn encodable_package_id( - id: PackageId, - state: &EncodeState<'_>, - resolve_version: ResolveVersion, -) -> EncodablePackageId { - let mut version = Some(id.version().to_string()); - let mut id_to_encode = id.source_id(); - if resolve_version <= ResolveVersion::V2 { - if 
let Some(GitReference::Branch(b)) = id_to_encode.git_reference() { - if b == "master" { - id_to_encode = - SourceId::for_git(id_to_encode.url(), GitReference::DefaultBranch).unwrap(); - } - } - } - let mut source = encode_source(id_to_encode).map(|s| s.with_precise(None)); - if let Some(counts) = &state.counts { - let version_counts = &counts[&id.name()]; - if version_counts[&id.version()] == 1 { - source = None; - if version_counts.len() == 1 { - version = None; - } - } - } - EncodablePackageId { - name: id.name().to_string(), - version, - source, - } -} - -fn encode_source(id: SourceId) -> Option { - if id.is_path() { - None - } else { - Some(id) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/errors.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/errors.rs deleted file mode 100644 index 5cabd01ba..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/errors.rs +++ /dev/null @@ -1,408 +0,0 @@ -use std::fmt; - -use crate::core::{Dependency, PackageId, Registry, Summary}; -use crate::util::lev_distance::lev_distance; -use crate::util::{Config, VersionExt}; -use anyhow::Error; - -use super::context::Context; -use super::types::{ConflictMap, ConflictReason}; - -/// Error during resolution providing a path of `PackageId`s. -pub struct ResolveError { - cause: Error, - package_path: Vec, -} - -impl ResolveError { - pub fn new>(cause: E, package_path: Vec) -> Self { - Self { - cause: cause.into(), - package_path, - } - } - - /// Returns a path of packages from the package whose requirements could not be resolved up to - /// the root. - pub fn package_path(&self) -> &[PackageId] { - &self.package_path - } -} - -impl std::error::Error for ResolveError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - self.cause.source() - } -} - -impl fmt::Debug for ResolveError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.cause.fmt(f) - } -} - -impl fmt::Display for ResolveError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.cause.fmt(f) - } -} - -pub type ActivateResult = Result; - -#[derive(Debug)] -pub enum ActivateError { - Fatal(anyhow::Error), - Conflict(PackageId, ConflictReason), -} - -impl From<::anyhow::Error> for ActivateError { - fn from(t: ::anyhow::Error) -> Self { - ActivateError::Fatal(t) - } -} - -impl From<(PackageId, ConflictReason)> for ActivateError { - fn from(t: (PackageId, ConflictReason)) -> Self { - ActivateError::Conflict(t.0, t.1) - } -} - -pub(super) fn activation_error( - cx: &Context, - registry: &mut dyn Registry, - parent: &Summary, - dep: &Dependency, - conflicting_activations: &ConflictMap, - candidates: &[Summary], - config: Option<&Config>, -) -> ResolveError { - let to_resolve_err = |err| { - ResolveError::new( - err, - cx.parents - .path_to_bottom(&parent.package_id()) - .into_iter() - .map(|(node, _)| node) - .cloned() - .collect(), - ) - }; - - if !candidates.is_empty() { - let mut msg = format!("failed to select a version for `{}`.", dep.package_name()); - msg.push_str("\n ... 
required by "); - msg.push_str(&describe_path_in_context(cx, &parent.package_id())); - - msg.push_str("\nversions that meet the requirements `"); - msg.push_str(&dep.version_req().to_string()); - msg.push_str("` are: "); - msg.push_str( - &candidates - .iter() - .map(|v| v.version()) - .map(|v| v.to_string()) - .collect::>() - .join(", "), - ); - - let mut conflicting_activations: Vec<_> = conflicting_activations.iter().collect(); - conflicting_activations.sort_unstable(); - // This is reversed to show the newest versions first. I don't know if there is - // a strong reason to do this, but that is how the code previously worked - // (see https://github.com/rust-lang/cargo/pull/5037) and I don't feel like changing it. - conflicting_activations.reverse(); - // Flag used for grouping all semver errors together. - let mut has_semver = false; - - for (p, r) in &conflicting_activations { - match r { - ConflictReason::Semver => { - has_semver = true; - } - ConflictReason::Links(link) => { - msg.push_str("\n\nthe package `"); - msg.push_str(&*dep.package_name()); - msg.push_str("` links to the native library `"); - msg.push_str(link); - msg.push_str("`, but it conflicts with a previous package which links to `"); - msg.push_str(link); - msg.push_str("` as well:\n"); - msg.push_str(&describe_path_in_context(cx, p)); - msg.push_str("\nOnly one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. "); - msg.push_str("Try to adjust your dependencies so that only one package uses the links ='"); - msg.push_str(&*dep.package_name()); - msg.push_str("' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links."); - } - ConflictReason::MissingFeatures(features) => { - msg.push_str("\n\nthe package `"); - msg.push_str(&*p.name()); - msg.push_str("` depends on `"); - msg.push_str(&*dep.package_name()); - msg.push_str("`, with features: `"); - msg.push_str(features); - msg.push_str("` but `"); - msg.push_str(&*dep.package_name()); - msg.push_str("` does not have these features.\n"); - // p == parent so the full path is redundant. - } - ConflictReason::RequiredDependencyAsFeature(features) => { - msg.push_str("\n\nthe package `"); - msg.push_str(&*p.name()); - msg.push_str("` depends on `"); - msg.push_str(&*dep.package_name()); - msg.push_str("`, with features: `"); - msg.push_str(features); - msg.push_str("` but `"); - msg.push_str(&*dep.package_name()); - msg.push_str("` does not have these features.\n"); - msg.push_str( - " It has a required dependency with that name, \ - but only optional dependencies can be used as features.\n", - ); - // p == parent so the full path is redundant. - } - ConflictReason::NonImplicitDependencyAsFeature(features) => { - msg.push_str("\n\nthe package `"); - msg.push_str(&*p.name()); - msg.push_str("` depends on `"); - msg.push_str(&*dep.package_name()); - msg.push_str("`, with features: `"); - msg.push_str(features); - msg.push_str("` but `"); - msg.push_str(&*dep.package_name()); - msg.push_str("` does not have these features.\n"); - msg.push_str( - " It has an optional dependency with that name, \ - but but that dependency uses the \"dep:\" \ - syntax in the features table, so it does not have an \ - implicit feature with that name.\n", - ); - // p == parent so the full path is redundant. - } - ConflictReason::PublicDependency(pkg_id) => { - // TODO: This needs to be implemented. 
- unimplemented!("pub dep {:?}", pkg_id); - } - ConflictReason::PubliclyExports(pkg_id) => { - // TODO: This needs to be implemented. - unimplemented!("pub exp {:?}", pkg_id); - } - } - } - - if has_semver { - // Group these errors together. - msg.push_str("\n\nall possible versions conflict with previously selected packages."); - for (p, r) in &conflicting_activations { - if let ConflictReason::Semver = r { - msg.push_str("\n\n previously selected "); - msg.push_str(&describe_path_in_context(cx, p)); - } - } - } - - msg.push_str("\n\nfailed to select a version for `"); - msg.push_str(&*dep.package_name()); - msg.push_str("` which could resolve this conflict"); - - return to_resolve_err(anyhow::format_err!("{}", msg)); - } - - // We didn't actually find any candidates, so we need to - // give an error message that nothing was found. - // - // Maybe the user mistyped the ver_req? Like `dep="2"` when `dep="0.2"` - // was meant. So we re-query the registry with `deb="*"` so we can - // list a few versions that were actually found. - let all_req = semver::VersionReq::parse("*").unwrap(); - let mut new_dep = dep.clone(); - new_dep.set_version_req(all_req); - let mut candidates = match registry.query_vec(&new_dep, false) { - Ok(candidates) => candidates, - Err(e) => return to_resolve_err(e), - }; - candidates.sort_unstable_by(|a, b| b.version().cmp(a.version())); - - let mut msg = - if !candidates.is_empty() { - let versions = { - let mut versions = candidates - .iter() - .take(3) - .map(|cand| cand.version().to_string()) - .collect::>(); - - if candidates.len() > 3 { - versions.push("...".into()); - } - - versions.join(", ") - }; - - let mut msg = format!( - "failed to select a version for the requirement `{} = \"{}\"`\n\ - candidate versions found which didn't match: {}\n\ - location searched: {}\n", - dep.package_name(), - dep.version_req(), - versions, - registry.describe_source(dep.source_id()), - ); - msg.push_str("required by "); - msg.push_str(&describe_path_in_context(cx, &parent.package_id())); - - // If we have a path dependency with a locked version, then this may - // indicate that we updated a sub-package and forgot to run `cargo - // update`. In this case try to print a helpful error! - if dep.source_id().is_path() && dep.version_req().to_string().starts_with('=') { - msg.push_str( - "\nconsider running `cargo update` to update \ - a path dependency's locked version", - ); - } - - if registry.is_replaced(dep.source_id()) { - msg.push_str("\nperhaps a crate was updated and forgotten to be re-vendored?"); - } - - msg - } else { - // Maybe the user mistyped the name? Like `dep-thing` when `Dep_Thing` - // was meant. So we try asking the registry for a `fuzzy` search for suggestions. 
- let mut candidates = Vec::new(); - if let Err(e) = registry.query(&new_dep, &mut |s| candidates.push(s), true) { - return to_resolve_err(e); - }; - candidates.sort_unstable_by_key(|a| a.name()); - candidates.dedup_by(|a, b| a.name() == b.name()); - let mut candidates: Vec<_> = candidates - .iter() - .map(|n| (lev_distance(&*new_dep.package_name(), &*n.name()), n)) - .filter(|&(d, _)| d < 4) - .collect(); - candidates.sort_by_key(|o| o.0); - let mut msg: String; - if candidates.is_empty() { - msg = format!("no matching package named `{}` found\n", dep.package_name()); - } else { - msg = format!( - "no matching package found\nsearched package name: `{}`\n", - dep.package_name() - ); - - // If dependency package name is equal to the name of the candidate here - // it may be a prerelease package which hasn't been specified correctly - if dep.package_name() == candidates[0].1.name() - && candidates[0].1.package_id().version().is_prerelease() - { - msg.push_str("prerelease package needs to be specified explicitly\n"); - msg.push_str(&format!( - "{name} = {{ version = \"{version}\" }}", - name = candidates[0].1.name(), - version = candidates[0].1.package_id().version() - )); - } else { - let mut names = candidates - .iter() - .take(3) - .map(|c| c.1.name().as_str()) - .collect::>(); - - if candidates.len() > 3 { - names.push("..."); - } - // Vertically align first suggestion with missing crate name - // so a typo jumps out at you. - msg.push_str("perhaps you meant: "); - msg.push_str(&names.iter().enumerate().fold( - String::default(), - |acc, (i, el)| match i { - 0 => acc + el, - i if names.len() - 1 == i && candidates.len() <= 3 => acc + " or " + el, - _ => acc + ", " + el, - }, - )); - } - msg.push('\n'); - } - msg.push_str(&format!("location searched: {}\n", dep.source_id())); - msg.push_str("required by "); - msg.push_str(&describe_path_in_context(cx, &parent.package_id())); - - msg - }; - - if let Some(config) = config { - if config.offline() { - msg.push_str( - "\nAs a reminder, you're using offline mode (--offline) \ - which can sometimes cause surprising resolution failures, \ - if this error is too confusing you may wish to retry \ - without the offline flag.", - ); - } - } - - to_resolve_err(anyhow::format_err!("{}", msg)) -} - -/// Returns String representation of dependency chain for a particular `pkgid` -/// within given context. -pub(super) fn describe_path_in_context(cx: &Context, id: &PackageId) -> String { - let iter = cx - .parents - .path_to_bottom(id) - .into_iter() - .map(|(p, d)| (p, d.and_then(|d| d.iter().next()))); - describe_path(iter) -} - -/// Returns String representation of dependency chain for a particular `pkgid`. -/// -/// Note that all elements of `path` iterator should have `Some` dependency -/// except the first one. It would look like: -/// -/// (pkg0, None) -/// -> (pkg1, dep from pkg1 satisfied by pkg0) -/// -> (pkg2, dep from pkg2 satisfied by pkg1) -/// -> ... 
-pub(crate) fn describe_path<'a>( - mut path: impl Iterator)>, -) -> String { - use std::fmt::Write; - - if let Some(p) = path.next() { - let mut dep_path_desc = format!("package `{}`", p.0); - for (pkg, dep) in path { - let dep = dep.unwrap(); - let source_kind = if dep.source_id().is_path() { - "path " - } else if dep.source_id().is_git() { - "git " - } else { - "" - }; - let requirement = if source_kind.is_empty() { - format!("{} = \"{}\"", dep.name_in_toml(), dep.version_req()) - } else { - dep.name_in_toml().to_string() - }; - let locked_version = dep - .version_req() - .locked_version() - .map(|v| format!("(locked to {}) ", v)) - .unwrap_or_default(); - - write!( - dep_path_desc, - "\n ... which satisfies {}dependency `{}` {}of package `{}`", - source_kind, requirement, locked_version, pkg - ) - .unwrap(); - } - - return dep_path_desc; - } - - String::new() -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/features.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/features.rs deleted file mode 100644 index 31b6c7833..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/features.rs +++ /dev/null @@ -1,844 +0,0 @@ -//! Feature resolver. -//! -//! This is a new feature resolver that runs independently of the main -//! dependency resolver. It is enabled when the user specifies `resolver = -//! "2"` in `Cargo.toml`. -//! -//! One of its key characteristics is that it can avoid unifying features for -//! shared dependencies in some situations. See `FeatureOpts` for the -//! different behaviors that can be enabled. If no extra options are enabled, -//! then it should behave exactly the same as the dependency resolver's -//! feature resolution. This can be verified by setting the -//! `__CARGO_FORCE_NEW_FEATURES=compare` environment variable and running -//! Cargo's test suite (or building other projects), and checking if it -//! panics. Note: the `features2` tests will fail because they intentionally -//! compare the old vs new behavior, so forcing the old behavior will -//! naturally fail the tests. -//! -//! The preferred way to engage this new resolver is via -//! `resolve_ws_with_opts`. -//! -//! This does not *replace* feature resolution in the dependency resolver, but -//! instead acts as a second pass which can *narrow* the features selected in -//! the dependency resolver. The dependency resolver still needs to do its own -//! feature resolution in order to avoid selecting optional dependencies that -//! are never enabled. The dependency resolver could, in theory, just assume -//! all optional dependencies on all packages are enabled (and remove all -//! knowledge of features), but that could introduce new requirements that -//! might change old behavior or cause conflicts. Maybe some day in the future -//! we could experiment with that, but it seems unlikely to work or be all -//! that helpful. -//! -//! There are many assumptions made about the dependency resolver. This -//! feature resolver assumes validation has already been done on the feature -//! maps, and doesn't do any validation itself. It assumes dev-dependencies -//! within a dependency have been removed. There are probably other -//! assumptions that I am forgetting. 
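The module docs above, together with the `ActivateMap` alias defined just below, describe a resolver whose results are keyed on both the package and whether the features were requested for a host (build-dependency or proc-macro) unit. A rough, self-contained sketch of that shape, using illustrative stand-in types rather than cargo's own `PackageId` and `InternedString`:

```rust
use std::collections::{BTreeSet, HashMap};

// Illustrative stand-ins for cargo's PackageId and InternedString.
type PkgName = &'static str;
type Feature = &'static str;

fn main() {
    // Activation map keyed by (package, for_host), mirroring the shape the
    // feature resolver builds in its `ActivateMap`.
    let mut activated: HashMap<(PkgName, bool), BTreeSet<Feature>> = HashMap::new();

    // `serde` reached through a normal (target) dependency.
    activated
        .entry(("serde", false))
        .or_default()
        .extend(["default", "std"]);

    // The same `serde` reached through a build dependency or proc-macro.
    // With decoupled host deps these two entries are *not* unified.
    activated
        .entry(("serde", true))
        .or_default()
        .extend(["derive"]);

    for ((pkg, for_host), feats) in &activated {
        println!("{pkg} (for_host = {for_host}): {feats:?}");
    }
}
```

With host-dependency decoupling enabled, the two entries for the same package stay separate instead of being unified into a single feature set.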
- -use crate::core::compiler::{CompileKind, RustcTargetData}; -use crate::core::dependency::{DepKind, Dependency}; -use crate::core::resolver::types::FeaturesSet; -use crate::core::resolver::{Resolve, ResolveBehavior}; -use crate::core::{FeatureValue, PackageId, PackageIdSpec, PackageSet, Workspace}; -use crate::util::interning::InternedString; -use crate::util::CargoResult; -use anyhow::bail; -use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; -use std::rc::Rc; - -/// Map of activated features. -/// -/// The key is `(PackageId, bool)` where the bool is `true` if these -/// are features for a build dependency or proc-macro. -type ActivateMap = HashMap<(PackageId, bool), BTreeSet>; - -/// Set of all activated features for all packages in the resolve graph. -pub struct ResolvedFeatures { - activated_features: ActivateMap, - /// Optional dependencies that should be built. - /// - /// The value is the `name_in_toml` of the dependencies. - activated_dependencies: ActivateMap, - /// This is only here for legacy support when the new resolver is not enabled. - /// - /// This is the set of features enabled for each package. - legacy_features: Option>>, - /// This is only here for legacy support when the new resolver is not enabled. - /// - /// This is the set of optional dependencies enabled for each package. - legacy_dependencies: Option>>, - opts: FeatureOpts, -} - -/// Options for how the feature resolver works. -#[derive(Default)] -pub struct FeatureOpts { - /// Use the new resolver instead of the old one. - new_resolver: bool, - /// Build deps and proc-macros will not share share features with other dep kinds. - decouple_host_deps: bool, - /// Dev dep features will not be activated unless needed. - decouple_dev_deps: bool, - /// Targets that are not in use will not activate features. - ignore_inactive_targets: bool, - /// If enabled, compare against old resolver (for testing). - compare: bool, -} - -/// Flag to indicate if Cargo is building *any* dev units (tests, examples, etc.). -/// -/// This disables decoupling of dev dependencies. It may be possible to relax -/// this in the future, but it will require significant changes to how unit -/// dependencies are computed, and can result in longer build times with -/// `cargo test` because the lib may need to be built 3 times instead of -/// twice. -#[derive(Copy, Clone, PartialEq)] -pub enum HasDevUnits { - Yes, - No, -} - -/// Flag to indicate that target-specific filtering should be disabled. -#[derive(Copy, Clone, PartialEq)] -pub enum ForceAllTargets { - Yes, - No, -} - -/// Flag to indicate if features are requested for a build dependency or not. -#[derive(Copy, Clone, Debug, PartialEq)] -pub enum FeaturesFor { - NormalOrDev, - /// Build dependency or proc-macro. 
- HostDep, -} - -impl FeaturesFor { - pub fn from_for_host(for_host: bool) -> FeaturesFor { - if for_host { - FeaturesFor::HostDep - } else { - FeaturesFor::NormalOrDev - } - } -} - -impl FeatureOpts { - pub fn new( - ws: &Workspace<'_>, - has_dev_units: HasDevUnits, - force_all_targets: ForceAllTargets, - ) -> CargoResult { - let mut opts = FeatureOpts::default(); - let unstable_flags = ws.config().cli_unstable(); - let mut enable = |feat_opts: &Vec| { - opts.new_resolver = true; - for opt in feat_opts { - match opt.as_ref() { - "build_dep" | "host_dep" => opts.decouple_host_deps = true, - "dev_dep" => opts.decouple_dev_deps = true, - "itarget" => opts.ignore_inactive_targets = true, - "all" => { - opts.decouple_host_deps = true; - opts.decouple_dev_deps = true; - opts.ignore_inactive_targets = true; - } - "compare" => opts.compare = true, - "ws" => unimplemented!(), - s => bail!("-Zfeatures flag `{}` is not supported", s), - } - } - Ok(()) - }; - if let Some(feat_opts) = unstable_flags.features.as_ref() { - enable(feat_opts)?; - } - match ws.resolve_behavior() { - ResolveBehavior::V1 => {} - ResolveBehavior::V2 => { - enable(&vec!["all".to_string()]).unwrap(); - } - } - // This env var is intended for testing only. - if let Ok(env_opts) = std::env::var("__CARGO_FORCE_NEW_FEATURES") { - if env_opts == "1" { - opts.new_resolver = true; - } else { - let env_opts = env_opts.split(',').map(|s| s.to_string()).collect(); - enable(&env_opts)?; - } - } - if let HasDevUnits::Yes = has_dev_units { - // Dev deps cannot be decoupled when they are in use. - opts.decouple_dev_deps = false; - } - if let ForceAllTargets::Yes = force_all_targets { - opts.ignore_inactive_targets = false; - } - if unstable_flags.weak_dep_features { - // Force this ON because it only works with the new resolver. - opts.new_resolver = true; - } - Ok(opts) - } - - /// Creates a new FeatureOpts for the given behavior. - pub fn new_behavior(behavior: ResolveBehavior, has_dev_units: HasDevUnits) -> FeatureOpts { - match behavior { - ResolveBehavior::V1 => FeatureOpts::default(), - ResolveBehavior::V2 => FeatureOpts { - new_resolver: true, - decouple_host_deps: true, - decouple_dev_deps: has_dev_units == HasDevUnits::No, - ignore_inactive_targets: true, - compare: false, - }, - } - } -} - -/// Features flags requested for a package. -/// -/// This should be cheap and fast to clone, it is used in the resolver for -/// various caches. -/// -/// This is split into enum variants because the resolver needs to handle -/// features coming from different places (command-line and dependency -/// declarations), but those different places have different constraints on -/// which syntax is allowed. This helps ensure that every place dealing with -/// features is properly handling those syntax restrictions. -#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub enum RequestedFeatures { - /// Features requested on the command-line with flags. - CliFeatures(CliFeatures), - /// Features specified in a dependency declaration. - DepFeatures { - /// The `features` dependency field. - features: FeaturesSet, - /// The `default-features` dependency field. - uses_default_features: bool, - }, -} - -/// Features specified on the command-line. -#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub struct CliFeatures { - /// Features from the `--features` flag. - pub features: Rc>, - /// The `--all-features` flag. - pub all_features: bool, - /// Inverse of `--no-default-features` flag. 
- pub uses_default_features: bool, -} - -impl CliFeatures { - /// Creates a new CliFeatures from the given command-line flags. - pub fn from_command_line( - features: &[String], - all_features: bool, - uses_default_features: bool, - ) -> CargoResult { - let features = Rc::new(CliFeatures::split_features(features)); - // Some early validation to ensure correct syntax. - for feature in features.iter() { - match feature { - // Maybe call validate_feature_name here once it is an error? - FeatureValue::Feature(_) => {} - FeatureValue::Dep { .. } => { - bail!( - "feature `{}` is not allowed to use explicit `dep:` syntax", - feature - ); - } - FeatureValue::DepFeature { dep_feature, .. } => { - if dep_feature.contains('/') { - bail!("multiple slashes in feature `{}` is not allowed", feature); - } - } - } - } - Ok(CliFeatures { - features, - all_features, - uses_default_features, - }) - } - - /// Creates a new CliFeatures with the given `all_features` setting. - pub fn new_all(all_features: bool) -> CliFeatures { - CliFeatures { - features: Rc::new(BTreeSet::new()), - all_features, - uses_default_features: true, - } - } - - fn split_features(features: &[String]) -> BTreeSet { - features - .iter() - .flat_map(|s| s.split_whitespace()) - .flat_map(|s| s.split(',')) - .filter(|s| !s.is_empty()) - .map(InternedString::new) - .map(FeatureValue::new) - .collect() - } -} - -impl ResolvedFeatures { - /// Returns the list of features that are enabled for the given package. - pub fn activated_features( - &self, - pkg_id: PackageId, - features_for: FeaturesFor, - ) -> Vec { - self.activated_features_int(pkg_id, features_for) - .expect("activated_features for invalid package") - } - - /// Returns if the given dependency should be included. - /// - /// This handles dependencies disabled via `cfg` expressions and optional - /// dependencies which are not enabled. - pub fn is_dep_activated( - &self, - pkg_id: PackageId, - features_for: FeaturesFor, - dep_name: InternedString, - ) -> bool { - if let Some(legacy) = &self.legacy_dependencies { - legacy - .get(&pkg_id) - .map(|deps| deps.contains(&dep_name)) - .unwrap_or(false) - } else { - let is_build = self.opts.decouple_host_deps && features_for == FeaturesFor::HostDep; - self.activated_dependencies - .get(&(pkg_id, is_build)) - .map(|deps| deps.contains(&dep_name)) - .unwrap_or(false) - } - } - - /// Variant of `activated_features` that returns `None` if this is - /// not a valid pkg_id/is_build combination. Used in places which do - /// not know which packages are activated (like `cargo clean`). - pub fn activated_features_unverified( - &self, - pkg_id: PackageId, - features_for: FeaturesFor, - ) -> Option> { - self.activated_features_int(pkg_id, features_for).ok() - } - - fn activated_features_int( - &self, - pkg_id: PackageId, - features_for: FeaturesFor, - ) -> CargoResult> { - if let Some(legacy) = &self.legacy_features { - Ok(legacy.get(&pkg_id).map_or_else(Vec::new, |v| v.clone())) - } else { - let is_build = self.opts.decouple_host_deps && features_for == FeaturesFor::HostDep; - if let Some(fs) = self.activated_features.get(&(pkg_id, is_build)) { - Ok(fs.iter().cloned().collect()) - } else { - bail!("features did not find {:?} {:?}", pkg_id, is_build) - } - } - } - - /// Compares the result against the original resolver behavior. - /// - /// Used by `cargo fix --edition` to display any differences. 
- pub fn compare_legacy(&self, legacy: &ResolvedFeatures) -> DiffMap { - let legacy_features = legacy.legacy_features.as_ref().unwrap(); - self.activated_features - .iter() - .filter_map(|((pkg_id, for_host), new_features)| { - let old_features = match legacy_features.get(pkg_id) { - Some(feats) => feats.iter().cloned().collect(), - None => BTreeSet::new(), - }; - // The new resolver should never add features. - assert_eq!(new_features.difference(&old_features).next(), None); - let removed_features: BTreeSet<_> = - old_features.difference(new_features).cloned().collect(); - if removed_features.is_empty() { - None - } else { - Some(((*pkg_id, *for_host), removed_features)) - } - }) - .collect() - } -} - -/// Map of differences. -/// -/// Key is `(pkg_id, for_host)`. Value is a set of features or dependencies removed. -pub type DiffMap = BTreeMap<(PackageId, bool), BTreeSet>; - -pub struct FeatureResolver<'a, 'cfg> { - ws: &'a Workspace<'cfg>, - target_data: &'a RustcTargetData<'cfg>, - /// The platforms to build for, requested by the user. - requested_targets: &'a [CompileKind], - resolve: &'a Resolve, - package_set: &'a PackageSet<'cfg>, - /// Options that change how the feature resolver operates. - opts: FeatureOpts, - /// Map of features activated for each package. - activated_features: ActivateMap, - /// Map of optional dependencies activated for each package. - activated_dependencies: ActivateMap, - /// Keeps track of which packages have had its dependencies processed. - /// Used to avoid cycles, and to speed up processing. - processed_deps: HashSet<(PackageId, bool)>, - /// If this is `true`, then `for_host` needs to be tracked while - /// traversing the graph. - /// - /// This is only here to avoid calling `is_proc_macro` when all feature - /// options are disabled (because `is_proc_macro` can trigger downloads). - /// This has to be separate from `FeatureOpts.decouple_host_deps` because - /// `for_host` tracking is also needed for `itarget` to work properly. - track_for_host: bool, - /// `dep_name?/feat_name` features that will be activated if `dep_name` is - /// ever activated. - /// - /// The key is the `(package, for_host, dep_name)` of the package whose - /// dependency will trigger the addition of new features. The value is the - /// set of features to activate. - deferred_weak_dependencies: HashMap<(PackageId, bool, InternedString), HashSet>, -} - -impl<'a, 'cfg> FeatureResolver<'a, 'cfg> { - /// Runs the resolution algorithm and returns a new `ResolvedFeatures` - /// with the result. - pub fn resolve( - ws: &Workspace<'cfg>, - target_data: &RustcTargetData<'cfg>, - resolve: &Resolve, - package_set: &'a PackageSet<'cfg>, - cli_features: &CliFeatures, - specs: &[PackageIdSpec], - requested_targets: &[CompileKind], - opts: FeatureOpts, - ) -> CargoResult { - use crate::util::profile; - let _p = profile::start("resolve features"); - - if !opts.new_resolver { - // Legacy mode. 
- return Ok(ResolvedFeatures { - activated_features: HashMap::new(), - activated_dependencies: HashMap::new(), - legacy_features: Some(resolve.features_clone()), - legacy_dependencies: Some(compute_legacy_deps(resolve)), - opts, - }); - } - let track_for_host = opts.decouple_host_deps || opts.ignore_inactive_targets; - let mut r = FeatureResolver { - ws, - target_data, - requested_targets, - resolve, - package_set, - opts, - activated_features: HashMap::new(), - activated_dependencies: HashMap::new(), - processed_deps: HashSet::new(), - track_for_host, - deferred_weak_dependencies: HashMap::new(), - }; - r.do_resolve(specs, cli_features)?; - log::debug!("features={:#?}", r.activated_features); - if r.opts.compare { - r.compare(); - } - Ok(ResolvedFeatures { - activated_features: r.activated_features, - activated_dependencies: r.activated_dependencies, - legacy_features: None, - legacy_dependencies: None, - opts: r.opts, - }) - } - - /// Performs the process of resolving all features for the resolve graph. - fn do_resolve( - &mut self, - specs: &[PackageIdSpec], - cli_features: &CliFeatures, - ) -> CargoResult<()> { - let member_features = self.ws.members_with_features(specs, cli_features)?; - for (member, cli_features) in &member_features { - let fvs = self.fvs_from_requested(member.package_id(), cli_features); - let for_host = self.track_for_host && self.is_proc_macro(member.package_id()); - self.activate_pkg(member.package_id(), for_host, &fvs)?; - if for_host { - // Also activate without for_host. This is needed if the - // proc-macro includes other targets (like binaries or tests), - // or running in `cargo test`. Note that in a workspace, if - // the proc-macro is selected on the command like (like with - // `--workspace`), this forces feature unification with normal - // dependencies. This is part of the bigger problem where - // features depend on which packages are built. - self.activate_pkg(member.package_id(), false, &fvs)?; - } - } - Ok(()) - } - - fn activate_pkg( - &mut self, - pkg_id: PackageId, - for_host: bool, - fvs: &[FeatureValue], - ) -> CargoResult<()> { - log::trace!("activate_pkg {} {}", pkg_id.name(), for_host); - // Add an empty entry to ensure everything is covered. This is intended for - // finding bugs where the resolver missed something it should have visited. - // Remove this in the future if `activated_features` uses an empty default. - self.activated_features - .entry((pkg_id, self.opts.decouple_host_deps && for_host)) - .or_insert_with(BTreeSet::new); - for fv in fvs { - self.activate_fv(pkg_id, for_host, fv)?; - } - if !self.processed_deps.insert((pkg_id, for_host)) { - // Already processed dependencies. There's no need to process them - // again. This is primarily to avoid cycles, but also helps speed - // things up. - // - // This is safe because if another package comes along and adds a - // feature on this package, it will immediately add it (in - // `activate_fv`), and recurse as necessary right then and there. - // For example, consider we've already processed our dependencies, - // and another package comes along and enables one of our optional - // dependencies, it will do so immediately in the - // `FeatureValue::DepFeature` branch, and then immediately - // recurse into that optional dependency. This also holds true for - // features that enable other features. 
- return Ok(()); - } - for (dep_pkg_id, deps) in self.deps(pkg_id, for_host) { - for (dep, dep_for_host) in deps { - if dep.is_optional() { - // Optional dependencies are enabled in `activate_fv` when - // a feature enables it. - continue; - } - // Recurse into the dependency. - let fvs = self.fvs_from_dependency(dep_pkg_id, dep); - self.activate_pkg(dep_pkg_id, dep_for_host, &fvs)?; - } - } - Ok(()) - } - - /// Activate a single FeatureValue for a package. - fn activate_fv( - &mut self, - pkg_id: PackageId, - for_host: bool, - fv: &FeatureValue, - ) -> CargoResult<()> { - log::trace!("activate_fv {} {} {}", pkg_id.name(), for_host, fv); - match fv { - FeatureValue::Feature(f) => { - self.activate_rec(pkg_id, for_host, *f)?; - } - FeatureValue::Dep { dep_name } => { - self.activate_dependency(pkg_id, for_host, *dep_name)?; - } - FeatureValue::DepFeature { - dep_name, - dep_feature, - weak, - } => { - self.activate_dep_feature(pkg_id, for_host, *dep_name, *dep_feature, *weak)?; - } - } - Ok(()) - } - - /// Activate the given feature for the given package, and then recursively - /// activate any other features that feature enables. - fn activate_rec( - &mut self, - pkg_id: PackageId, - for_host: bool, - feature_to_enable: InternedString, - ) -> CargoResult<()> { - log::trace!( - "activate_rec {} {} feat={}", - pkg_id.name(), - for_host, - feature_to_enable - ); - let enabled = self - .activated_features - .entry((pkg_id, self.opts.decouple_host_deps && for_host)) - .or_insert_with(BTreeSet::new); - if !enabled.insert(feature_to_enable) { - // Already enabled. - return Ok(()); - } - let summary = self.resolve.summary(pkg_id); - let feature_map = summary.features(); - let fvs = match feature_map.get(&feature_to_enable) { - Some(fvs) => fvs, - None => { - // TODO: this should only happen for optional dependencies. - // Other cases should be validated by Summary's `build_feature_map`. - // Figure out some way to validate this assumption. - log::debug!( - "pkg {:?} does not define feature {}", - pkg_id, - feature_to_enable - ); - return Ok(()); - } - }; - for fv in fvs { - self.activate_fv(pkg_id, for_host, fv)?; - } - Ok(()) - } - - /// Activate a dependency (`dep:dep_name` syntax). - fn activate_dependency( - &mut self, - pkg_id: PackageId, - for_host: bool, - dep_name: InternedString, - ) -> CargoResult<()> { - // Mark this dependency as activated. - let save_for_host = self.opts.decouple_host_deps && for_host; - self.activated_dependencies - .entry((pkg_id, save_for_host)) - .or_default() - .insert(dep_name); - // Check for any deferred features. - let to_enable = self - .deferred_weak_dependencies - .remove(&(pkg_id, for_host, dep_name)); - // Activate the optional dep. - for (dep_pkg_id, deps) in self.deps(pkg_id, for_host) { - for (dep, dep_for_host) in deps { - if dep.name_in_toml() != dep_name { - continue; - } - if let Some(to_enable) = &to_enable { - for dep_feature in to_enable { - log::trace!( - "activate deferred {} {} -> {}/{}", - pkg_id.name(), - for_host, - dep_name, - dep_feature - ); - let fv = FeatureValue::new(*dep_feature); - self.activate_fv(dep_pkg_id, dep_for_host, &fv)?; - } - } - let fvs = self.fvs_from_dependency(dep_pkg_id, dep); - self.activate_pkg(dep_pkg_id, dep_for_host, &fvs)?; - } - } - Ok(()) - } - - /// Activate a feature within a dependency (`dep_name/feat_name` syntax). 
- fn activate_dep_feature( - &mut self, - pkg_id: PackageId, - for_host: bool, - dep_name: InternedString, - dep_feature: InternedString, - weak: bool, - ) -> CargoResult<()> { - for (dep_pkg_id, deps) in self.deps(pkg_id, for_host) { - for (dep, dep_for_host) in deps { - if dep.name_in_toml() != dep_name { - continue; - } - if dep.is_optional() { - let save_for_host = self.opts.decouple_host_deps && for_host; - if weak - && !self - .activated_dependencies - .get(&(pkg_id, save_for_host)) - .map(|deps| deps.contains(&dep_name)) - .unwrap_or(false) - { - // This is weak, but not yet activated. Defer in case - // something comes along later and enables it. - log::trace!( - "deferring feature {} {} -> {}/{}", - pkg_id.name(), - for_host, - dep_name, - dep_feature - ); - self.deferred_weak_dependencies - .entry((pkg_id, for_host, dep_name)) - .or_default() - .insert(dep_feature); - continue; - } - - // Activate the dependency on self. - let fv = FeatureValue::Dep { dep_name }; - self.activate_fv(pkg_id, for_host, &fv)?; - if !weak { - // The old behavior before weak dependencies were - // added is to also enables a feature of the same - // name. - self.activate_rec(pkg_id, for_host, dep_name)?; - } - } - // Activate the feature on the dependency. - let fv = FeatureValue::new(dep_feature); - self.activate_fv(dep_pkg_id, dep_for_host, &fv)?; - } - } - Ok(()) - } - - /// Returns Vec of FeatureValues from a Dependency definition. - fn fvs_from_dependency(&self, dep_id: PackageId, dep: &Dependency) -> Vec { - let summary = self.resolve.summary(dep_id); - let feature_map = summary.features(); - let mut result: Vec = dep - .features() - .iter() - .map(|f| FeatureValue::new(*f)) - .collect(); - let default = InternedString::new("default"); - if dep.uses_default_features() && feature_map.contains_key(&default) { - result.push(FeatureValue::Feature(default)); - } - result - } - - /// Returns Vec of FeatureValues from a set of command-line features. - fn fvs_from_requested( - &self, - pkg_id: PackageId, - cli_features: &CliFeatures, - ) -> Vec { - let summary = self.resolve.summary(pkg_id); - let feature_map = summary.features(); - if cli_features.all_features { - feature_map - .keys() - .map(|k| FeatureValue::Feature(*k)) - .collect() - } else { - let mut result: Vec = cli_features.features.iter().cloned().collect(); - let default = InternedString::new("default"); - if cli_features.uses_default_features && feature_map.contains_key(&default) { - result.push(FeatureValue::Feature(default)); - } - result - } - } - - /// Returns the dependencies for a package, filtering out inactive targets. - fn deps( - &self, - pkg_id: PackageId, - for_host: bool, - ) -> Vec<(PackageId, Vec<(&'a Dependency, bool)>)> { - // Helper for determining if a platform is activated. - let platform_activated = |dep: &Dependency| -> bool { - // We always care about build-dependencies, and they are always - // Host. If we are computing dependencies "for a build script", - // even normal dependencies are host-only. - if for_host || dep.is_build() { - return self - .target_data - .dep_platform_activated(dep, CompileKind::Host); - } - // Not a build dependency, and not for a build script, so must be Target. 
- self.requested_targets - .iter() - .any(|kind| self.target_data.dep_platform_activated(dep, *kind)) - }; - self.resolve - .deps(pkg_id) - .map(|(dep_id, deps)| { - let deps = deps - .iter() - .filter(|dep| { - if dep.platform().is_some() - && self.opts.ignore_inactive_targets - && !platform_activated(dep) - { - return false; - } - if self.opts.decouple_dev_deps && dep.kind() == DepKind::Development { - return false; - } - true - }) - .map(|dep| { - let dep_for_host = self.track_for_host - && (for_host || dep.is_build() || self.is_proc_macro(dep_id)); - (dep, dep_for_host) - }) - .collect::>(); - (dep_id, deps) - }) - .filter(|(_id, deps)| !deps.is_empty()) - .collect() - } - - /// Compare the activated features to the resolver. Used for testing. - fn compare(&self) { - let mut found = false; - for ((pkg_id, dep_kind), features) in &self.activated_features { - let r_features = self.resolve.features(*pkg_id); - if !r_features.iter().eq(features.iter()) { - crate::drop_eprintln!( - self.ws.config(), - "{}/{:?} features mismatch\nresolve: {:?}\nnew: {:?}\n", - pkg_id, - dep_kind, - r_features, - features - ); - found = true; - } - } - if found { - panic!("feature mismatch"); - } - } - - fn is_proc_macro(&self, package_id: PackageId) -> bool { - self.package_set - .get_one(package_id) - .expect("packages downloaded") - .proc_macro() - } -} - -/// Computes a map of PackageId to the set of optional dependencies that are -/// enabled for that dep (when the new resolver is not enabled). -fn compute_legacy_deps(resolve: &Resolve) -> HashMap> { - let mut result: HashMap> = HashMap::new(); - for pkg_id in resolve.iter() { - for (_dep_id, deps) in resolve.deps(pkg_id) { - for dep in deps { - if dep.is_optional() { - result.entry(pkg_id).or_default().insert(dep.name_in_toml()); - } - } - } - } - result -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/mod.rs deleted file mode 100644 index 28b328132..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/mod.rs +++ /dev/null @@ -1,1091 +0,0 @@ -//! Resolution of the entire dependency graph for a crate. -//! -//! This module implements the core logic in taking the world of crates and -//! constraints and creating a resolved graph with locked versions for all -//! crates and their dependencies. This is separate from the registry module -//! which is more worried about discovering crates from various sources, this -//! module just uses the Registry trait as a source to learn about crates from. -//! -//! Actually solving a constraint graph is an NP-hard problem. This algorithm -//! is basically a nice heuristic to make sure we get roughly the best answer -//! most of the time. The constraints that we're working with are: -//! -//! 1. Each crate can have any number of dependencies. Each dependency can -//! declare a version range that it is compatible with. -//! 2. Crates can be activated with multiple version (e.g., show up in the -//! dependency graph twice) so long as each pairwise instance have -//! semver-incompatible versions. -//! -//! The algorithm employed here is fairly simple, we simply do a DFS, activating -//! the "newest crate" (highest version) first and then going to the next -//! option. The heuristics we employ are: -//! -//! * Never try to activate a crate version which is incompatible. This means we -//! only try crates which will actually satisfy a dependency and we won't ever -//! 
try to activate a crate that's semver compatible with something else -//! activated (as we're only allowed to have one) nor try to activate a crate -//! that has the same links attribute as something else -//! activated. -//! * Always try to activate the highest version crate first. The default -//! dependency in Cargo (e.g., when you write `foo = "0.1.2"`) is -//! semver-compatible, so selecting the highest version possible will allow us -//! to hopefully satisfy as many dependencies at once. -//! -//! Beyond that, what's implemented below is just a naive backtracking version -//! which should in theory try all possible combinations of dependencies and -//! versions to see if one works. The first resolution that works causes -//! everything to bail out immediately and return success, and only if *nothing* -//! works do we actually return an error up the stack. -//! -//! ## Performance -//! -//! Note that this is a relatively performance-critical portion of Cargo. The -//! data that we're processing is proportional to the size of the dependency -//! graph, which can often be quite large (e.g., take a look at Servo). To make -//! matters worse the DFS algorithm we're implemented is inherently quite -//! inefficient. When we add the requirement of backtracking on top it means -//! that we're implementing something that probably shouldn't be allocating all -//! over the place. - -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::mem; -use std::rc::Rc; -use std::time::{Duration, Instant}; - -use log::{debug, trace}; - -use crate::core::PackageIdSpec; -use crate::core::{Dependency, PackageId, Registry, Summary}; -use crate::util::config::Config; -use crate::util::errors::CargoResult; -use crate::util::profile; - -use self::context::Context; -use self::dep_cache::RegistryQueryer; -use self::features::RequestedFeatures; -use self::types::{ConflictMap, ConflictReason, DepsFrame}; -use self::types::{FeaturesSet, RcVecIter, RemainingDeps, ResolverProgress}; - -pub use self::encode::Metadata; -pub use self::encode::{EncodableDependency, EncodablePackageId, EncodableResolve}; -pub use self::errors::{ActivateError, ActivateResult, ResolveError}; -pub use self::features::{CliFeatures, ForceAllTargets, HasDevUnits}; -pub use self::resolve::{Resolve, ResolveVersion}; -pub use self::types::{ResolveBehavior, ResolveOpts}; -pub use self::version_prefs::{VersionOrdering, VersionPreferences}; - -mod conflict_cache; -mod context; -mod dep_cache; -mod encode; -pub(crate) mod errors; -pub mod features; -mod resolve; -mod types; -mod version_prefs; - -/// Builds the list of all packages required to build the first argument. -/// -/// * `summaries` - the list of package summaries along with how to resolve -/// their features. This is a list of all top-level packages that are intended -/// to be part of the lock file (resolve output). These typically are a list -/// of all workspace members. -/// -/// * `replacements` - this is a list of `[replace]` directives found in the -/// root of the workspace. The list here is a `PackageIdSpec` of what to -/// replace and a `Dependency` to replace that with. In general it's not -/// recommended to use `[replace]` any more and use `[patch]` instead, which -/// is supported elsewhere. -/// -/// * `registry` - this is the source from which all package summaries are -/// loaded. It's expected that this is extensively configured ahead of time -/// and is idempotent with our requests to it (aka returns the same results -/// for the same query every time). 
Typically this is an instance of a -/// `PackageRegistry`. -/// -/// * `version_prefs` - this represents a preference for some versions over others, -/// based on the lock file or other reasons such as `[patch]`es. -/// -/// * `config` - a location to print warnings and such, or `None` if no warnings -/// should be printed -/// -/// * `check_public_visible_dependencies` - a flag for whether to enforce the restrictions -/// introduced in the "public & private dependencies" RFC (1977). The current implementation -/// makes sure that there is only one version of each name visible to each package. -/// -/// But there are 2 stable ways to directly depend on different versions of the same name. -/// 1. Use the renamed dependencies functionality -/// 2. Use 'cfg({})' dependencies functionality -/// -/// When we have a decision for how to implement is without breaking existing functionality -/// this flag can be removed. -pub fn resolve( - summaries: &[(Summary, ResolveOpts)], - replacements: &[(PackageIdSpec, Dependency)], - registry: &mut dyn Registry, - version_prefs: &VersionPreferences, - config: Option<&Config>, - check_public_visible_dependencies: bool, -) -> CargoResult { - let cx = Context::new(check_public_visible_dependencies); - let _p = profile::start("resolving"); - let minimal_versions = match config { - Some(config) => config.cli_unstable().minimal_versions, - None => false, - }; - let mut registry = - RegistryQueryer::new(registry, replacements, version_prefs, minimal_versions); - let cx = activate_deps_loop(cx, &mut registry, summaries, config)?; - - let mut cksums = HashMap::new(); - for (summary, _) in cx.activations.values() { - let cksum = summary.checksum().map(|s| s.to_string()); - cksums.insert(summary.package_id(), cksum); - } - let graph = cx.graph(); - let replacements = cx.resolve_replacements(®istry); - let features = cx - .resolve_features - .iter() - .map(|(k, v)| (*k, v.iter().cloned().collect())) - .collect(); - let summaries = cx - .activations - .into_iter() - .map(|(_key, (summary, _age))| (summary.package_id(), summary)) - .collect(); - let resolve = Resolve::new( - graph, - replacements, - features, - cksums, - BTreeMap::new(), - Vec::new(), - ResolveVersion::default(), - summaries, - ); - - check_cycles(&resolve)?; - check_duplicate_pkgs_in_lockfile(&resolve)?; - trace!("resolved: {:?}", resolve); - - Ok(resolve) -} - -/// Recursively activates the dependencies for `summaries`, in depth-first order, -/// backtracking across possible candidates for each dependency as necessary. -/// -/// If all dependencies can be activated and resolved to a version in the -/// dependency graph, `cx` is returned. -fn activate_deps_loop( - mut cx: Context, - registry: &mut RegistryQueryer<'_>, - summaries: &[(Summary, ResolveOpts)], - config: Option<&Config>, -) -> CargoResult { - let mut backtrack_stack = Vec::new(); - let mut remaining_deps = RemainingDeps::new(); - - // `past_conflicting_activations` is a cache of the reasons for each time we - // backtrack. - let mut past_conflicting_activations = conflict_cache::ConflictCache::new(); - - // Activate all the initial summaries to kick off some work. 
- for &(ref summary, ref opts) in summaries { - debug!("initial activation: {}", summary.package_id()); - let res = activate(&mut cx, registry, None, summary.clone(), opts); - match res { - Ok(Some((frame, _))) => remaining_deps.push(frame), - Ok(None) => (), - Err(ActivateError::Fatal(e)) => return Err(e), - Err(ActivateError::Conflict(_, _)) => panic!("bad error from activate"), - } - } - - let mut printed = ResolverProgress::new(); - - // Main resolution loop, this is the workhorse of the resolution algorithm. - // - // You'll note that a few stacks are maintained on the side, which might - // seem odd when this algorithm looks like it could be implemented - // recursively. While correct, this is implemented iteratively to avoid - // blowing the stack (the recursion depth is proportional to the size of the - // input). - // - // The general sketch of this loop is to run until there are no dependencies - // left to activate, and for each dependency to attempt to activate all of - // its own dependencies in turn. The `backtrack_stack` is a side table of - // backtracking states where if we hit an error we can return to in order to - // attempt to continue resolving. - while let Some((just_here_for_the_error_messages, frame)) = - remaining_deps.pop_most_constrained() - { - let (mut parent, (mut dep, candidates, mut features)) = frame; - - // If we spend a lot of time here (we shouldn't in most cases) then give - // a bit of a visual indicator as to what we're doing. - printed.shell_status(config)?; - - trace!( - "{}[{}]>{} {} candidates", - parent.name(), - cx.age, - dep.package_name(), - candidates.len() - ); - - let just_here_for_the_error_messages = just_here_for_the_error_messages - && past_conflicting_activations - .conflicting(&cx, &dep) - .is_some(); - - let mut remaining_candidates = RemainingCandidates::new(&candidates); - - // `conflicting_activations` stores all the reasons we were unable to - // activate candidates. One of these reasons will have to go away for - // backtracking to find a place to restart. It is also the list of - // things to explain in the error message if we fail to resolve. - // - // This is a map of package ID to a reason why that packaged caused a - // conflict for us. - let mut conflicting_activations = ConflictMap::new(); - - // When backtracking we don't fully update `conflicting_activations` - // especially for the cases that we didn't make a backtrack frame in the - // first place. This `backtracked` var stores whether we are continuing - // from a restored backtrack frame so that we can skip caching - // `conflicting_activations` in `past_conflicting_activations` - let mut backtracked = false; - - loop { - let next = remaining_candidates.next( - &mut conflicting_activations, - &cx, - &dep, - parent.package_id(), - ); - - let (candidate, has_another) = next.ok_or(()).or_else(|_| { - // If we get here then our `remaining_candidates` was just - // exhausted, so `dep` failed to activate. - // - // It's our job here to backtrack, if possible, and find a - // different candidate to activate. If we can't find any - // candidates whatsoever then it's time to bail entirely. - trace!( - "{}[{}]>{} -- no candidates", - parent.name(), - cx.age, - dep.package_name() - ); - - // Use our list of `conflicting_activations` to add to our - // global list of past conflicting activations, effectively - // globally poisoning `dep` if `conflicting_activations` ever - // shows up again. 
We'll use the `past_conflicting_activations` - // below to determine if a dependency is poisoned and skip as - // much work as possible. - // - // If we're only here for the error messages then there's no - // need to try this as this dependency is already known to be - // bad. - // - // As we mentioned above with the `backtracked` variable if this - // local is set to `true` then our `conflicting_activations` may - // not be right, so we can't push into our global cache. - let mut generalize_conflicting_activations = None; - if !just_here_for_the_error_messages && !backtracked { - past_conflicting_activations.insert(&dep, &conflicting_activations); - if let Some(c) = generalize_conflicting( - &cx, - registry, - &mut past_conflicting_activations, - &parent, - &dep, - &conflicting_activations, - ) { - generalize_conflicting_activations = Some(c); - } - } - - match find_candidate( - &cx, - &mut backtrack_stack, - &parent, - backtracked, - generalize_conflicting_activations - .as_ref() - .unwrap_or(&conflicting_activations), - ) { - Some((candidate, has_another, frame)) => { - // Reset all of our local variables used with the - // contents of `frame` to complete our backtrack. - cx = frame.context; - remaining_deps = frame.remaining_deps; - remaining_candidates = frame.remaining_candidates; - parent = frame.parent; - dep = frame.dep; - features = frame.features; - conflicting_activations = frame.conflicting_activations; - backtracked = true; - Ok((candidate, has_another)) - } - None => { - debug!("no candidates found"); - Err(errors::activation_error( - &cx, - registry.registry, - &parent, - &dep, - &conflicting_activations, - &candidates, - config, - )) - } - } - })?; - - // If we're only here for the error messages then we know that this - // activation will fail one way or another. To that end if we've got - // more candidates we want to fast-forward to the last one as - // otherwise we'll just backtrack here anyway (helping us to skip - // some work). - if just_here_for_the_error_messages && !backtracked && has_another { - continue; - } - - // We have a `candidate`. Create a `BacktrackFrame` so we can add it - // to the `backtrack_stack` later if activation succeeds. - // - // Note that if we don't actually have another candidate then there - // will be nothing to backtrack to so we skip construction of the - // frame. This is a relatively important optimization as a number of - // the `clone` calls below can be quite expensive, so we avoid them - // if we can. - let backtrack = if has_another { - Some(BacktrackFrame { - context: Context::clone(&cx), - remaining_deps: remaining_deps.clone(), - remaining_candidates: remaining_candidates.clone(), - parent: Summary::clone(&parent), - dep: Dependency::clone(&dep), - features: Rc::clone(&features), - conflicting_activations: conflicting_activations.clone(), - }) - } else { - None - }; - - let pid = candidate.package_id(); - let opts = ResolveOpts { - dev_deps: false, - features: RequestedFeatures::DepFeatures { - features: Rc::clone(&features), - uses_default_features: dep.uses_default_features(), - }, - }; - trace!( - "{}[{}]>{} trying {}", - parent.name(), - cx.age, - dep.package_name(), - candidate.version() - ); - let res = activate(&mut cx, registry, Some((&parent, &dep)), candidate, &opts); - - let successfully_activated = match res { - // Success! We've now activated our `candidate` in our context - // and we're almost ready to move on. 
We may want to scrap this - // frame in the end if it looks like it's not going to end well, - // so figure that out here. - Ok(Some((mut frame, dur))) => { - printed.elapsed(dur); - - // Our `frame` here is a new package with its own list of - // dependencies. Do a sanity check here of all those - // dependencies by cross-referencing our global - // `past_conflicting_activations`. Recall that map is a - // global cache which lists sets of packages where, when - // activated, the dependency is unresolvable. - // - // If any our our frame's dependencies fit in that bucket, - // aka known unresolvable, then we extend our own set of - // conflicting activations with theirs. We can do this - // because the set of conflicts we found implies the - // dependency can't be activated which implies that we - // ourselves can't be activated, so we know that they - // conflict with us. - let mut has_past_conflicting_dep = just_here_for_the_error_messages; - if !has_past_conflicting_dep { - if let Some(conflicting) = frame - .remaining_siblings - .clone() - .filter_map(|(ref new_dep, _, _)| { - past_conflicting_activations.conflicting(&cx, new_dep) - }) - .next() - { - // If one of our deps is known unresolvable - // then we will not succeed. - // How ever if we are part of the reason that - // one of our deps conflicts then - // we can make a stronger statement - // because we will definitely be activated when - // we try our dep. - conflicting_activations.extend( - conflicting - .iter() - .filter(|&(p, _)| p != &pid) - .map(|(&p, r)| (p, r.clone())), - ); - - has_past_conflicting_dep = true; - } - } - // If any of `remaining_deps` are known unresolvable with - // us activated, then we extend our own set of - // conflicting activations with theirs and its parent. We can do this - // because the set of conflicts we found implies the - // dependency can't be activated which implies that we - // ourselves are incompatible with that dep, so we know that deps - // parent conflict with us. - if !has_past_conflicting_dep { - if let Some(known_related_bad_deps) = - past_conflicting_activations.dependencies_conflicting_with(pid) - { - if let Some((other_parent, conflict)) = remaining_deps - .iter() - // for deps related to us - .filter(|&(_, ref other_dep)| { - known_related_bad_deps.contains(other_dep) - }) - .filter_map(|(other_parent, other_dep)| { - past_conflicting_activations - .find_conflicting(&cx, &other_dep, Some(pid)) - .map(|con| (other_parent, con)) - }) - .next() - { - let rel = conflict.get(&pid).unwrap().clone(); - - // The conflict we found is - // "other dep will not succeed if we are activated." - // We want to add - // "our dep will not succeed if other dep is in remaining_deps" - // but that is not how the cache is set up. - // So we add the less general but much faster, - // "our dep will not succeed if other dep's parent is activated". - conflicting_activations.extend( - conflict - .iter() - .filter(|&(p, _)| p != &pid) - .map(|(&p, r)| (p, r.clone())), - ); - conflicting_activations.insert(other_parent, rel); - has_past_conflicting_dep = true; - } - } - } - - // Ok if we're in a "known failure" state for this frame we - // may want to skip it altogether though. We don't want to - // skip it though in the case that we're displaying error - // messages to the user! - // - // Here we need to figure out if the user will see if we - // skipped this candidate (if it's known to fail, aka has a - // conflicting dep and we're the last candidate). 
If we're - // here for the error messages, we can't skip it (but we can - // prune extra work). If we don't have any candidates in our - // backtrack stack then we're the last line of defense, so - // we'll want to present an error message for sure. - let activate_for_error_message = has_past_conflicting_dep && !has_another && { - just_here_for_the_error_messages || { - find_candidate( - &cx, - &mut backtrack_stack.clone(), - &parent, - backtracked, - &conflicting_activations, - ) - .is_none() - } - }; - - // If we're only here for the error messages then we know - // one of our candidate deps will fail, meaning we will - // fail and that none of the backtrack frames will find a - // candidate that will help. Consequently let's clean up the - // no longer needed backtrack frames. - if activate_for_error_message { - backtrack_stack.clear(); - } - - // If we don't know for a fact that we'll fail or if we're - // just here for the error message then we push this frame - // onto our list of to-be-resolve, which will generate more - // work for us later on. - // - // Otherwise we're guaranteed to fail and were not here for - // error messages, so we skip work and don't push anything - // onto our stack. - frame.just_for_error_messages = has_past_conflicting_dep; - if !has_past_conflicting_dep || activate_for_error_message { - remaining_deps.push(frame); - true - } else { - trace!( - "{}[{}]>{} skipping {} ", - parent.name(), - cx.age, - dep.package_name(), - pid.version() - ); - false - } - } - - // This candidate's already activated, so there's no extra work - // for us to do. Let's keep going. - Ok(None) => true, - - // We failed with a super fatal error (like a network error), so - // bail out as quickly as possible as we can't reliably - // backtrack from errors like these - Err(ActivateError::Fatal(e)) => return Err(e), - - // We failed due to a bland conflict, bah! Record this in our - // frame's list of conflicting activations as to why this - // candidate failed, and then move on. - Err(ActivateError::Conflict(id, reason)) => { - conflicting_activations.insert(id, reason); - false - } - }; - - // If we've successfully activated then save off the backtrack frame - // if one was created, and otherwise break out of the inner - // activation loop as we're ready to move to the next dependency - if successfully_activated { - backtrack_stack.extend(backtrack); - break; - } - - // We've failed to activate this dependency, oh dear! Our call to - // `activate` above may have altered our `cx` local variable, so - // restore it back if we've got a backtrack frame. - // - // If we don't have a backtrack frame then we're just using the `cx` - // for error messages anyway so we can live with a little - // imprecision. - if let Some(b) = backtrack { - cx = b.context; - } - } - - // Ok phew, that loop was a big one! If we've broken out then we've - // successfully activated a candidate. Our stacks are all in place that - // we're ready to move on to the next dependency that needs activation, - // so loop back to the top of the function here. - } - - Ok(cx) -} - -/// Attempts to activate the summary `candidate` in the context `cx`. -/// -/// This function will pull dependency summaries from the registry provided, and -/// the dependencies of the package will be determined by the `opts` provided. -/// If `candidate` was activated, this function returns the dependency frame to -/// iterate through next. 
-fn activate( - cx: &mut Context, - registry: &mut RegistryQueryer<'_>, - parent: Option<(&Summary, &Dependency)>, - candidate: Summary, - opts: &ResolveOpts, -) -> ActivateResult> { - let candidate_pid = candidate.package_id(); - cx.age += 1; - if let Some((parent, dep)) = parent { - let parent_pid = parent.package_id(); - // add an edge from candidate to parent in the parents graph - cx.parents - .link(candidate_pid, parent_pid) - // and associate dep with that edge - .insert(dep.clone()); - if let Some(public_dependency) = cx.public_dependency.as_mut() { - public_dependency.add_edge( - candidate_pid, - parent_pid, - dep.is_public(), - cx.age, - &cx.parents, - ); - } - } - - let activated = cx.flag_activated(&candidate, opts, parent)?; - - let candidate = match registry.replacement_summary(candidate_pid) { - Some(replace) => { - // Note the `None` for parent here since `[replace]` is a bit wonky - // and doesn't activate the same things that `[patch]` typically - // does. TBH it basically cause panics in the test suite if - // `parent` is passed through here and `[replace]` is otherwise - // on life support so it's not critical to fix bugs anyway per se. - if cx.flag_activated(replace, opts, None)? && activated { - return Ok(None); - } - trace!( - "activating {} (replacing {})", - replace.package_id(), - candidate_pid - ); - replace.clone() - } - None => { - if activated { - return Ok(None); - } - trace!("activating {}", candidate_pid); - candidate - } - }; - - let now = Instant::now(); - let (used_features, deps) = - &*registry.build_deps(cx, parent.map(|p| p.0.package_id()), &candidate, opts)?; - - // Record what list of features is active for this package. - if !used_features.is_empty() { - Rc::make_mut( - cx.resolve_features - .entry(candidate.package_id()) - .or_insert_with(Rc::default), - ) - .extend(used_features); - } - - let frame = DepsFrame { - parent: candidate, - just_for_error_messages: false, - remaining_siblings: RcVecIter::new(Rc::clone(deps)), - }; - Ok(Some((frame, now.elapsed()))) -} - -#[derive(Clone)] -struct BacktrackFrame { - context: Context, - remaining_deps: RemainingDeps, - remaining_candidates: RemainingCandidates, - parent: Summary, - dep: Dependency, - features: FeaturesSet, - conflicting_activations: ConflictMap, -} - -/// A helper "iterator" used to extract candidates within a current `Context` of -/// a dependency graph. -/// -/// This struct doesn't literally implement the `Iterator` trait (requires a few -/// more inputs) but in general acts like one. Each `RemainingCandidates` is -/// created with a list of candidates to choose from. When attempting to iterate -/// over the list of candidates only *valid* candidates are returned. Validity -/// is defined within a `Context`. -/// -/// Candidates passed to `new` may not be returned from `next` as they could be -/// filtered out, and as they are filtered the causes will be added to `conflicting_prev_active`. -#[derive(Clone)] -struct RemainingCandidates { - remaining: RcVecIter, - // This is an inlined peekable generator - has_another: Option, -} - -impl RemainingCandidates { - fn new(candidates: &Rc>) -> RemainingCandidates { - RemainingCandidates { - remaining: RcVecIter::new(Rc::clone(candidates)), - has_another: None, - } - } - - /// Attempts to find another candidate to check from this list. - /// - /// This method will attempt to move this iterator forward, returning a - /// candidate that's possible to activate. 
The `cx` argument is the current - /// context which determines validity for candidates returned, and the `dep` - /// is the dependency listing that we're activating for. - /// - /// If successful a `(Candidate, bool)` pair will be returned. The - /// `Candidate` is the candidate to attempt to activate, and the `bool` is - /// an indicator of whether there are remaining candidates to try of if - /// we've reached the end of iteration. - /// - /// If we've reached the end of the iterator here then `Err` will be - /// returned. The error will contain a map of package ID to conflict reason, - /// where each package ID caused a candidate to be filtered out from the - /// original list for the reason listed. - fn next( - &mut self, - conflicting_prev_active: &mut ConflictMap, - cx: &Context, - dep: &Dependency, - parent: PackageId, - ) -> Option<(Summary, bool)> { - for b in self.remaining.by_ref() { - let b_id = b.package_id(); - // The `links` key in the manifest dictates that there's only one - // package in a dependency graph, globally, with that particular - // `links` key. If this candidate links to something that's already - // linked to by a different package then we've gotta skip this. - if let Some(link) = b.links() { - if let Some(&a) = cx.links.get(&link) { - if a != b_id { - conflicting_prev_active - .entry(a) - .or_insert_with(|| ConflictReason::Links(link)); - continue; - } - } - } - - // Otherwise the condition for being a valid candidate relies on - // semver. Cargo dictates that you can't duplicate multiple - // semver-compatible versions of a crate. For example we can't - // simultaneously activate `foo 1.0.2` and `foo 1.2.0`. We can, - // however, activate `1.0.2` and `2.0.0`. - // - // Here we throw out our candidate if it's *compatible*, yet not - // equal, to all previously activated versions. - if let Some((a, _)) = cx.activations.get(&b_id.as_activations_key()) { - if *a != b { - conflicting_prev_active - .entry(a.package_id()) - .or_insert(ConflictReason::Semver); - continue; - } - } - // We may still have to reject do to a public dependency conflict. If one of any of our - // ancestors that can see us already knows about a different crate with this name then - // we have to reject this candidate. Additionally this candidate may already have been - // activated and have public dependants of its own, - // all of witch also need to be checked the same way. - if let Some(public_dependency) = cx.public_dependency.as_ref() { - if let Err(((c1, c2), c3)) = - public_dependency.can_add_edge(b_id, parent, dep.is_public(), &cx.parents) - { - conflicting_prev_active.insert(c1.0, c1.1); - conflicting_prev_active.insert(c2.0, c2.1); - if let Some(c3) = c3 { - conflicting_prev_active.insert(c3.0, c3.1); - } - continue; - } - } - - // Well if we made it this far then we've got a valid dependency. We - // want this iterator to be inherently "peekable" so we don't - // necessarily return the item just yet. Instead we stash it away to - // get returned later, and if we replaced something then that was - // actually the candidate to try first so we return that. - if let Some(r) = mem::replace(&mut self.has_another, Some(b)) { - return Some((r, true)); - } - } - - // Alright we've entirely exhausted our list of candidates. If we've got - // something stashed away return that here (also indicating that there's - // nothing else). - self.has_another.take().map(|r| (r, false)) - } -} - -/// Attempts to find a new conflict that allows a `find_candidate` feather then the input one. 
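The `has_another` field in `RemainingCandidates` is, as the comment above puts it, an inlined peekable generator: each valid candidate is stashed, the previously stashed one is returned, and the freshly refilled stash is what proves there is at least one more to come. Below is a minimal, self-contained sketch of that pattern, using a hypothetical `StashedIter` over plain integers rather than candidate summaries.

```rust
use std::mem;

/// Illustrative sketch only: a hand-rolled "peekable" wrapper in the style of
/// the resolver's `has_another` stash. Names are hypothetical.
struct StashedIter<I: Iterator> {
    inner: I,
    stash: Option<I::Item>,
}

impl<I: Iterator> StashedIter<I> {
    fn new(inner: I) -> Self {
        StashedIter { inner, stash: None }
    }

    /// Returns `Some((item, has_more))`, or `None` once everything is consumed.
    fn next_with_more(&mut self) -> Option<(I::Item, bool)> {
        // Each item we pull is stashed; the previously stashed item (if any)
        // is what we actually return, and the freshly refilled stash proves
        // there is at least one more item still to come.
        for item in self.inner.by_ref() {
            if let Some(prev) = mem::replace(&mut self.stash, Some(item)) {
                return Some((prev, true));
            }
        }
        // Underlying iterator exhausted: whatever is stashed is the last item.
        self.stash.take().map(|item| (item, false))
    }
}

fn main() {
    let mut it = StashedIter::new(vec![10, 20, 30].into_iter());
    assert_eq!(it.next_with_more(), Some((10, true)));
    assert_eq!(it.next_with_more(), Some((20, true)));
    assert_eq!(it.next_with_more(), Some((30, false)));
    assert_eq!(it.next_with_more(), None);
}
```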
-/// It will add the new conflict to the cache if one is found. -/// -/// Panics if the input conflict is not all active in `cx`. -fn generalize_conflicting( - cx: &Context, - registry: &mut RegistryQueryer<'_>, - past_conflicting_activations: &mut conflict_cache::ConflictCache, - parent: &Summary, - dep: &Dependency, - conflicting_activations: &ConflictMap, -) -> Option { - if conflicting_activations.is_empty() { - return None; - } - // We need to determine the `ContextAge` that this `conflicting_activations` will jump to, and why. - let (backtrack_critical_age, backtrack_critical_id) = conflicting_activations - .keys() - .map(|&c| (cx.is_active(c).expect("not currently active!?"), c)) - .max() - .unwrap(); - let backtrack_critical_reason: ConflictReason = - conflicting_activations[&backtrack_critical_id].clone(); - - if backtrack_critical_reason.is_public_dependency() { - return None; - } - - if cx - .parents - .is_path_from_to(&parent.package_id(), &backtrack_critical_id) - { - // We are a descendant of the trigger of the problem. - // The best generalization of this is to let things bubble up - // and let `backtrack_critical_id` figure this out. - return None; - } - // What parents does that critical activation have - for (critical_parent, critical_parents_deps) in - cx.parents.edges(&backtrack_critical_id).filter(|(p, _)| { - // it will only help backjump further if it is older then the critical_age - cx.is_active(**p).expect("parent not currently active!?") < backtrack_critical_age - }) - { - for critical_parents_dep in critical_parents_deps.iter() { - // A dep is equivalent to one of the things it can resolve to. - // Thus, if all the things it can resolve to have already ben determined - // to be conflicting, then we can just say that we conflict with the parent. - if let Some(others) = registry - .query(critical_parents_dep) - .expect("an already used dep now error!?") - .iter() - .rev() // the last one to be tried is the least likely to be in the cache, so start with that. - .map(|other| { - past_conflicting_activations - .find( - dep, - &|id| { - if id == other.package_id() { - // we are imagining that we used other instead - Some(backtrack_critical_age) - } else { - cx.is_active(id) - } - }, - Some(other.package_id()), - // we only care about things that are newer then critical_age - backtrack_critical_age, - ) - .map(|con| (other.package_id(), con)) - }) - .collect::>>() - { - let mut con = conflicting_activations.clone(); - // It is always valid to combine previously inserted conflicts. - // A, B are both known bad states each that can never be activated. - // A + B is redundant but can't be activated, as if - // A + B is active then A is active and we know that is not ok. - for (_, other) in &others { - con.extend(other.iter().map(|(&id, re)| (id, re.clone()))); - } - // Now that we have this combined conflict, we can do a substitution: - // A dep is equivalent to one of the things it can resolve to. - // So we can remove all the things that it resolves to and replace with the parent. 
- for (other_id, _) in &others { - con.remove(other_id); - } - con.insert(*critical_parent, backtrack_critical_reason); - - if cfg!(debug_assertions) { - // the entire point is to find an older conflict, so let's make sure we did - let new_age = con - .keys() - .map(|&c| cx.is_active(c).expect("not currently active!?")) - .max() - .unwrap(); - assert!( - new_age < backtrack_critical_age, - "new_age {} < backtrack_critical_age {}", - new_age, - backtrack_critical_age - ); - } - past_conflicting_activations.insert(dep, &con); - return Some(con); - } - } - } - None -} - -/// Looks through the states in `backtrack_stack` for dependencies with -/// remaining candidates. For each one, also checks if rolling back -/// could change the outcome of the failed resolution that caused backtracking -/// in the first place. Namely, if we've backtracked past the parent of the -/// failed dep, or any of the packages flagged as giving us trouble in -/// `conflicting_activations`. -/// -/// Read -/// For several more detailed explanations of the logic here. -fn find_candidate( - cx: &Context, - backtrack_stack: &mut Vec, - parent: &Summary, - backtracked: bool, - conflicting_activations: &ConflictMap, -) -> Option<(Summary, bool, BacktrackFrame)> { - // When we're calling this method we know that `parent` failed to - // activate. That means that some dependency failed to get resolved for - // whatever reason. Normally, that means that all of those reasons - // (plus maybe some extras) are listed in `conflicting_activations`. - // - // The abnormal situations are things that do not put all of the reasons in `conflicting_activations`: - // If we backtracked we do not know how our `conflicting_activations` related to - // the cause of that backtrack, so we do not update it. - let age = if !backtracked { - // we don't have abnormal situations. So we can ask `cx` for how far back we need to go. - let a = cx.is_conflicting(Some(parent.package_id()), conflicting_activations); - // If the `conflicting_activations` does not apply to `cx`, then something went very wrong - // in building it. But we will just fall back to laboriously trying all possibilities witch - // will give us the correct answer so only `assert` if there is a developer to debug it. - debug_assert!(a.is_some()); - a - } else { - None - }; - - while let Some(mut frame) = backtrack_stack.pop() { - let next = frame.remaining_candidates.next( - &mut frame.conflicting_activations, - &frame.context, - &frame.dep, - frame.parent.package_id(), - ); - let (candidate, has_another) = match next { - Some(pair) => pair, - None => continue, - }; - - // If all members of `conflicting_activations` are still - // active in this back up we know that we're guaranteed to not actually - // make any progress. As a result if we hit this condition we can - // completely skip this backtrack frame and move on to the next. - if let Some(age) = age { - if frame.context.age >= age { - trace!( - "{} = \"{}\" skip as not solving {}: {:?}", - frame.dep.package_name(), - frame.dep.version_req(), - parent.package_id(), - conflicting_activations - ); - // above we use `cx` to determine that this is still going to be conflicting. - // but lets just double check. - debug_assert!( - frame - .context - .is_conflicting(Some(parent.package_id()), conflicting_activations) - == Some(age) - ); - continue; - } else { - // above we use `cx` to determine that this is not going to be conflicting. - // but lets just double check. 
- debug_assert!(frame - .context - .is_conflicting(Some(parent.package_id()), conflicting_activations) - .is_none()); - } - } - - return Some((candidate, has_another, frame)); - } - None -} - -fn check_cycles(resolve: &Resolve) -> CargoResult<()> { - // Create a simple graph representation alternative of `resolve` which has - // only the edges we care about. Note that `BTree*` is used to produce - // deterministic error messages here. Also note that the main reason for - // this copy of the resolve graph is to avoid edges between a crate and its - // dev-dependency since that doesn't count for cycles. - let mut graph = BTreeMap::new(); - for id in resolve.iter() { - let map = graph.entry(id).or_insert_with(BTreeMap::new); - for (dep_id, listings) in resolve.deps_not_replaced(id) { - let transitive_dep = listings.iter().find(|d| d.is_transitive()); - - if let Some(transitive_dep) = transitive_dep.cloned() { - map.insert(dep_id, transitive_dep.clone()); - resolve - .replacement(dep_id) - .map(|p| map.insert(p, transitive_dep)); - } - } - } - - // After we have the `graph` that we care about, perform a simple cycle - // check by visiting all nodes. We visit each node at most once and we keep - // track of the path through the graph as we walk it. If we walk onto the - // same node twice that's a cycle. - let mut checked = HashSet::new(); - let mut path = Vec::new(); - let mut visited = HashSet::new(); - for pkg in graph.keys() { - if !checked.contains(pkg) { - visit(&graph, *pkg, &mut visited, &mut path, &mut checked)? - } - } - return Ok(()); - - fn visit( - graph: &BTreeMap>, - id: PackageId, - visited: &mut HashSet, - path: &mut Vec, - checked: &mut HashSet, - ) -> CargoResult<()> { - path.push(id); - if !visited.insert(id) { - let iter = path.iter().rev().skip(1).scan(id, |child, parent| { - let dep = graph.get(parent).and_then(|adjacent| adjacent.get(child)); - *child = *parent; - Some((parent, dep)) - }); - let iter = std::iter::once((&id, None)).chain(iter); - anyhow::bail!( - "cyclic package dependency: package `{}` depends on itself. Cycle:\n{}", - id, - errors::describe_path(iter), - ); - } - - if checked.insert(id) { - for dep in graph[&id].keys() { - visit(graph, *dep, visited, path, checked)?; - } - } - - path.pop(); - visited.remove(&id); - Ok(()) - } -} - -/// Checks that packages are unique when written to lock file. -/// -/// When writing package ID's to lock file, we apply lossy encoding. In -/// particular, we don't store paths of path dependencies. That means that -/// *different* packages may collide in the lock file, hence this check. 
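`check_cycles` walks the dependency graph depth-first while tracking both the current path (to detect when a node is reached twice) and a set of nodes already proven cycle-free (so each node is explored at most once). Here is a standalone sketch of that approach over a toy string graph; names and data are hypothetical, not Cargo's types.

```rust
use std::collections::{BTreeMap, BTreeSet};

/// Returns the first cycle found as a path of node names, or `None`.
fn find_cycle<'a>(graph: &BTreeMap<&'a str, Vec<&'a str>>) -> Option<Vec<&'a str>> {
    fn visit<'a>(
        graph: &BTreeMap<&'a str, Vec<&'a str>>,
        node: &'a str,
        on_path: &mut BTreeSet<&'a str>, // nodes on the current DFS path
        path: &mut Vec<&'a str>,         // the path itself, for reporting
        checked: &mut BTreeSet<&'a str>, // nodes already proven cycle-free
    ) -> Option<Vec<&'a str>> {
        path.push(node);
        if !on_path.insert(node) {
            // We walked onto a node that is already on the current path: cycle.
            return Some(path.clone());
        }
        if checked.insert(node) {
            for &dep in graph.get(node).into_iter().flatten() {
                if let Some(cycle) = visit(graph, dep, on_path, path, checked) {
                    return Some(cycle);
                }
            }
        }
        path.pop();
        on_path.remove(node);
        None
    }

    let mut checked = BTreeSet::new();
    for &node in graph.keys() {
        if !checked.contains(node) {
            let found = visit(graph, node, &mut BTreeSet::new(), &mut Vec::new(), &mut checked);
            if found.is_some() {
                return found;
            }
        }
    }
    None
}

fn main() {
    let mut graph = BTreeMap::new();
    graph.insert("a", vec!["b"]);
    graph.insert("b", vec!["c"]);
    graph.insert("c", vec!["a"]); // a -> b -> c -> a
    println!("{:?}", find_cycle(&graph)); // Some(["a", "b", "c", "a"])
}
```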
-fn check_duplicate_pkgs_in_lockfile(resolve: &Resolve) -> CargoResult<()> { - let mut unique_pkg_ids = HashMap::new(); - let state = encode::EncodeState::new(resolve); - for pkg_id in resolve.iter() { - let encodable_pkd_id = encode::encodable_package_id(pkg_id, &state, resolve.version()); - if let Some(prev_pkg_id) = unique_pkg_ids.insert(encodable_pkd_id, pkg_id) { - anyhow::bail!( - "package collision in the lockfile: packages {} and {} are different, \ - but only one can be written to lockfile unambiguously", - prev_pkg_id, - pkg_id - ) - } - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/resolve.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/resolve.rs deleted file mode 100644 index d98056977..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/resolve.rs +++ /dev/null @@ -1,412 +0,0 @@ -use super::encode::Metadata; -use crate::core::dependency::DepKind; -use crate::core::{Dependency, PackageId, PackageIdSpec, Summary, Target}; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::Graph; -use std::borrow::Borrow; -use std::collections::{HashMap, HashSet}; -use std::fmt; - -/// Represents a fully-resolved package dependency graph. Each node in the graph -/// is a package and edges represent dependencies between packages. -/// -/// Each instance of `Resolve` also understands the full set of features used -/// for each package. -pub struct Resolve { - /// A graph, whose vertices are packages and edges are dependency specifications - /// from `Cargo.toml`. We need a `HashSet` here because the same package - /// might be present in both `[dependencies]` and `[build-dependencies]`. - graph: Graph>, - /// Replacements from the `[replace]` table. - replacements: HashMap, - /// Inverted version of `replacements`. - reverse_replacements: HashMap, - /// Features enabled for a given package. - features: HashMap>, - /// Checksum for each package. A SHA256 hash of the `.crate` file used to - /// validate the correct crate file is used. This is `None` for sources - /// that do not use `.crate` files, like path or git dependencies. - checksums: HashMap>, - /// "Unknown" metadata. This is a collection of extra, unrecognized data - /// found in the `[metadata]` section of `Cargo.lock`, preserved for - /// forwards compatibility. - metadata: Metadata, - /// `[patch]` entries that did not match anything, preserved in - /// `Cargo.lock` as the `[[patch.unused]]` table array. Tracking unused - /// patches helps prevent Cargo from being forced to re-update the - /// registry every time it runs, and keeps the resolve in a locked state - /// so it doesn't re-resolve the unused entries. - unused_patches: Vec, - /// A map from packages to a set of their public dependencies - public_dependencies: HashMap>, - /// Version of the `Cargo.lock` format, see - /// `cargo::core::resolver::encode` for more. - version: ResolveVersion, - summaries: HashMap, -} - -/// A version to indicate how a `Cargo.lock` should be serialized. Currently -/// V2 is the default when creating a new lockfile. If a V1 lockfile already -/// exists, it will stay as V1. -/// -/// It's theorized that we can add more here over time to track larger changes -/// to the `Cargo.lock` format, but we've yet to see how that strategy pans out. -#[derive(PartialEq, Eq, Clone, Copy, Debug, PartialOrd, Ord)] -pub enum ResolveVersion { - /// Historical baseline for when this abstraction was added. 
- V1, - /// A more compact format, more amenable to avoiding source-control merge - /// conflicts. The `dependencies` arrays are compressed and checksums are - /// listed inline. Introduced in 2019 in version 1.38. New lockfiles use - /// V2 by default starting in 1.41. - V2, - /// A format that explicitly lists a `version` at the top of the file as - /// well as changing how git dependencies are encoded. Dependencies with - /// `branch = "master"` are no longer encoded the same way as those without - /// branch specifiers. - V3, -} - -impl Resolve { - pub fn new( - graph: Graph>, - replacements: HashMap, - features: HashMap>, - checksums: HashMap>, - metadata: Metadata, - unused_patches: Vec, - version: ResolveVersion, - summaries: HashMap, - ) -> Resolve { - let reverse_replacements = replacements.iter().map(|(&p, &r)| (r, p)).collect(); - let public_dependencies = graph - .iter() - .map(|p| { - let public_deps = graph - .edges(p) - .filter(|(_, deps)| { - deps.iter() - .any(|d| d.kind() == DepKind::Normal && d.is_public()) - }) - .map(|(dep_package, _)| *dep_package) - .collect::>(); - - (*p, public_deps) - }) - .collect(); - - Resolve { - graph, - replacements, - features, - checksums, - metadata, - unused_patches, - reverse_replacements, - public_dependencies, - version, - summaries, - } - } - - /// Resolves one of the paths from the given dependent package up to - /// the root. - pub fn path_to_top<'a>( - &'a self, - pkg: &'a PackageId, - ) -> Vec<(&'a PackageId, Option<&'a HashSet>)> { - self.graph.path_to_top(pkg) - } - - pub fn register_used_patches(&mut self, patches: &[Summary]) { - for summary in patches { - if !self.graph.contains(&summary.package_id()) { - self.unused_patches.push(summary.package_id()) - }; - } - } - - pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> { - // Given a previous instance of resolve, it should be forbidden to ever - // have a checksums which *differ*. If the same package ID has differing - // checksums, then something has gone wrong such as: - // - // * Something got seriously corrupted - // * A "mirror" isn't actually a mirror as some changes were made - // * A replacement source wasn't actually a replacement, some changes - // were made - // - // In all of these cases, we want to report an error to indicate that - // something is awry. Normal execution (esp just using crates.io) should - // never run into this. - for (id, cksum) in previous.checksums.iter() { - if let Some(mine) = self.checksums.get(id) { - if mine == cksum { - continue; - } - - // If the previous checksum wasn't calculated, the current - // checksum is `Some`. This may indicate that a source was - // erroneously replaced or was replaced with something that - // desires stronger checksum guarantees than can be afforded - // elsewhere. - if cksum.is_none() { - anyhow::bail!( - "\ -checksum for `{}` was not previously calculated, but a checksum could now \ -be calculated - -this could be indicative of a few possible situations: - - * the source `{}` did not previously support checksums, - but was replaced with one that does - * newer Cargo implementations know how to checksum this source, but this - older implementation does not - * the lock file is corrupt -", - id, - id.source_id() - ) - - // If our checksum hasn't been calculated, then it could mean - // that future Cargo figured out how to checksum something or - // more realistically we were overridden with a source that does - // not have checksums. 
- } else if mine.is_none() { - anyhow::bail!( - "\ -checksum for `{}` could not be calculated, but a checksum is listed in \ -the existing lock file - -this could be indicative of a few possible situations: - - * the source `{}` supports checksums, - but was replaced with one that doesn't - * the lock file is corrupt - -unable to verify that `{0}` is the same as when the lockfile was generated -", - id, - id.source_id() - ) - - // If the checksums aren't equal, and neither is None, then they - // must both be Some, in which case the checksum now differs. - // That's quite bad! - } else { - anyhow::bail!( - "\ -checksum for `{}` changed between lock files - -this could be indicative of a few possible errors: - - * the lock file is corrupt - * a replacement source in use (e.g., a mirror) returned a different checksum - * the source itself may be corrupt in one way or another - -unable to verify that `{0}` is the same as when the lockfile was generated -", - id - ); - } - } - } - - // Be sure to just copy over any unknown metadata. - self.metadata = previous.metadata.clone(); - - // Preserve the lockfile encoding where possible to avoid lockfile churn - self.version = previous.version; - - Ok(()) - } - - pub fn contains(&self, k: &Q) -> bool - where - PackageId: Borrow, - Q: Ord + Eq, - { - self.graph.contains(k) - } - - pub fn sort(&self) -> Vec { - self.graph.sort() - } - - pub fn iter(&self) -> impl Iterator + '_ { - self.graph.iter().cloned() - } - - pub fn deps(&self, pkg: PackageId) -> impl Iterator)> { - self.deps_not_replaced(pkg) - .map(move |(id, deps)| (self.replacement(id).unwrap_or(id), deps)) - } - - pub fn deps_not_replaced( - &self, - pkg: PackageId, - ) -> impl Iterator)> { - self.graph.edges(&pkg).map(|(id, deps)| (*id, deps)) - } - - pub fn replacement(&self, pkg: PackageId) -> Option { - self.replacements.get(&pkg).cloned() - } - - pub fn replacements(&self) -> &HashMap { - &self.replacements - } - - pub fn features(&self, pkg: PackageId) -> &[InternedString] { - self.features.get(&pkg).map(|v| &**v).unwrap_or(&[]) - } - - /// This is only here for legacy support, it will be removed when - /// switching to the new feature resolver. 
- pub fn features_clone(&self) -> HashMap> { - self.features.clone() - } - - pub fn is_public_dep(&self, pkg: PackageId, dep: PackageId) -> bool { - self.public_dependencies - .get(&pkg) - .map(|public_deps| public_deps.contains(&dep)) - .unwrap_or_else(|| panic!("Unknown dependency {:?} for package {:?}", dep, pkg)) - } - - pub fn query(&self, spec: &str) -> CargoResult { - PackageIdSpec::query_str(spec, self.iter()) - } - - pub fn specs_to_ids(&self, specs: &[PackageIdSpec]) -> CargoResult> { - specs.iter().map(|s| s.query(self.iter())).collect() - } - - pub fn unused_patches(&self) -> &[PackageId] { - &self.unused_patches - } - - pub fn checksums(&self) -> &HashMap> { - &self.checksums - } - - pub fn metadata(&self) -> &Metadata { - &self.metadata - } - - pub fn extern_crate_name( - &self, - from: PackageId, - to: PackageId, - to_target: &Target, - ) -> CargoResult { - let empty_set: HashSet = HashSet::new(); - let deps = if from == to { - &empty_set - } else { - self.dependencies_listed(from, to) - }; - - let crate_name = to_target.crate_name(); - let mut names = deps.iter().map(|d| { - d.explicit_name_in_toml() - .map(|s| s.as_str().replace("-", "_")) - .unwrap_or_else(|| crate_name.clone()) - }); - let name = names.next().unwrap_or_else(|| crate_name.clone()); - for n in names { - anyhow::ensure!( - n == name, - "the crate `{}` depends on crate `{}` multiple times with different names", - from, - to, - ); - } - Ok(name) - } - - fn dependencies_listed(&self, from: PackageId, to: PackageId) -> &HashSet { - // We've got a dependency on `from` to `to`, but this dependency edge - // may be affected by [replace]. If the `to` package is listed as the - // target of a replacement (aka the key of a reverse replacement map) - // then we try to find our dependency edge through that. If that fails - // then we go down below assuming it's not replaced. - // - // Note that we don't treat `from` as if it's been replaced because - // that's where the dependency originates from, and we only replace - // targets of dependencies not the originator. - if let Some(replace) = self.reverse_replacements.get(&to) { - if let Some(deps) = self.graph.edge(&from, replace) { - return deps; - } - } - match self.graph.edge(&from, &to) { - Some(ret) => ret, - None => panic!("no Dependency listed for `{}` => `{}`", from, to), - } - } - - /// Returns the version of the encoding that's being used for this lock - /// file. - pub fn version(&self) -> ResolveVersion { - self.version - } - - pub fn set_version(&mut self, version: ResolveVersion) { - self.version = version; - } - - pub fn summary(&self, pkg_id: PackageId) -> &Summary { - &self.summaries[&pkg_id] - } -} - -impl PartialEq for Resolve { - fn eq(&self, other: &Resolve) -> bool { - macro_rules! compare { - ($($fields:ident)* | $($ignored:ident)*) => { - let Resolve { $($fields,)* $($ignored: _,)* } = self; - $($fields == &other.$fields)&&* - } - } - compare! { - // fields to compare - graph replacements reverse_replacements features - checksums metadata unused_patches public_dependencies summaries - | - // fields to ignore - version - } - } -} - -impl fmt::Debug for Resolve { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - writeln!(fmt, "graph: {:?}", self.graph)?; - writeln!(fmt, "\nfeatures: {{")?; - for (pkg, features) in &self.features { - writeln!(fmt, " {}: {:?}", pkg, features)?; - } - write!(fmt, "}}") - } -} - -impl Default for ResolveVersion { - /// The default way to encode new or updated `Cargo.lock` files. 
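The `PartialEq` impl for `Resolve` above relies on a small macro that destructures the struct exhaustively, splitting fields into "compare these" and "ignore those"; adding a new field without deciding which group it belongs to then fails to compile. A minimal sketch of the same trick on a hypothetical struct:

```rust
#[derive(Debug)]
struct Record {
    name: String,
    value: u32,
    // Bookkeeping that should not affect equality.
    last_touched: u64,
}

impl PartialEq for Record {
    fn eq(&self, other: &Record) -> bool {
        macro_rules! compare {
            ($($fields:ident)* | $($ignored:ident)*) => {{
                // Exhaustive destructuring: forgetting a new field here fails
                // to compile, forcing a conscious choice for every field.
                let Record { $($fields,)* $($ignored: _,)* } = self;
                $($fields == &other.$fields)&&*
            }};
        }
        compare!(name value | last_touched)
    }
}

fn main() {
    let a = Record { name: "x".into(), value: 1, last_touched: 10 };
    let b = Record { name: "x".into(), value: 1, last_touched: 99 };
    // `last_touched` differs but is deliberately ignored by `eq`.
    assert!(a == b);
    println!("{:?} == {:?}", a, b);
}
```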
- /// - /// It's important that if a new version of `ResolveVersion` is added that - /// this is not updated until *at least* the support for the version is in - /// the stable release of Rust. - /// - /// This resolve version will be used for all new lock files, for example - /// those generated by `cargo update` (update everything) or building after - /// a `cargo new` (where no lock file previously existed). This is also used - /// for *updated* lock files such as when a dependency is added or when a - /// version requirement changes. In this situation Cargo's updating the lock - /// file anyway so it takes the opportunity to bump the lock file version - /// forward. - fn default() -> ResolveVersion { - ResolveVersion::V3 - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/types.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/types.rs deleted file mode 100644 index 4617e3ea8..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/types.rs +++ /dev/null @@ -1,374 +0,0 @@ -use super::features::{CliFeatures, RequestedFeatures}; -use crate::core::{Dependency, PackageId, Summary}; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::Config; -use std::cmp::Ordering; -use std::collections::{BTreeMap, BTreeSet}; -use std::ops::Range; -use std::rc::Rc; -use std::time::{Duration, Instant}; - -pub struct ResolverProgress { - ticks: u16, - start: Instant, - time_to_print: Duration, - printed: bool, - deps_time: Duration, - #[cfg(debug_assertions)] - slow_cpu_multiplier: u64, -} - -impl ResolverProgress { - pub fn new() -> ResolverProgress { - ResolverProgress { - ticks: 0, - start: Instant::now(), - time_to_print: Duration::from_millis(500), - printed: false, - deps_time: Duration::new(0, 0), - // Some CI setups are much slower then the equipment used by Cargo itself. - // Architectures that do not have a modern processor, hardware emulation, etc. - // In the test code we have `slow_cpu_multiplier`, but that is not accessible here. - #[cfg(debug_assertions)] - slow_cpu_multiplier: std::env::var("CARGO_TEST_SLOW_CPU_MULTIPLIER") - .ok() - .and_then(|m| m.parse().ok()) - .unwrap_or(1), - } - } - pub fn shell_status(&mut self, config: Option<&Config>) -> CargoResult<()> { - // If we spend a lot of time here (we shouldn't in most cases) then give - // a bit of a visual indicator as to what we're doing. Only enable this - // when stderr is a tty (a human is likely to be watching) to ensure we - // get deterministic output otherwise when observed by tools. - // - // Also note that we hit this loop a lot, so it's fairly performance - // sensitive. As a result try to defer a possibly expensive operation - // like `Instant::now` by only checking every N iterations of this loop - // to amortize the cost of the current time lookup. - self.ticks += 1; - if let Some(config) = config { - if config.shell().is_err_tty() - && !self.printed - && self.ticks % 1000 == 0 - && self.start.elapsed() - self.deps_time > self.time_to_print - { - self.printed = true; - config.shell().status("Resolving", "dependency graph...")?; - } - } - #[cfg(debug_assertions)] - { - // The largest test in our suite takes less then 5000 ticks - // with all the algorithm improvements. - // If any of them are removed then it takes more than I am willing to measure. - // So lets fail the test fast if we have ben running for two long. 
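`shell_status` runs on every resolver tick, so, as the comment above explains, it avoids calling `Instant::now()` on each invocation and only samples the clock every 1000 ticks to amortize the cost. A standalone sketch of that throttling pattern follows; the threshold, tick interval, and message are made up for the example.

```rust
use std::time::{Duration, Instant};

/// Illustrative sketch: print a "still working" notice at most once, and only
/// after a visible delay, while keeping the per-iteration cost near zero.
struct Progress {
    ticks: u64,
    start: Instant,
    threshold: Duration,
    printed: bool,
}

impl Progress {
    fn new(threshold: Duration) -> Self {
        Progress { ticks: 0, start: Instant::now(), threshold, printed: false }
    }

    fn tick(&mut self) {
        self.ticks += 1;
        // `Instant::now()` is comparatively expensive for a hot loop, so only
        // sample the clock every 1000 ticks.
        if !self.printed && self.ticks % 1000 == 0 && self.start.elapsed() > self.threshold {
            self.printed = true;
            eprintln!("Resolving dependency graph...");
        }
    }
}

fn main() {
    let mut progress = Progress::new(Duration::from_millis(500));
    for _ in 0..5_000_000 {
        progress.tick();
        // ... real work would go here; with enough of it, the message above
        // appears exactly once after roughly half a second ...
    }
}
```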
- assert!( - self.ticks < 50_000, - "got to 50_000 ticks in {:?}", - self.start.elapsed() - ); - // The largest test in our suite takes less then 30 sec - // with all the improvements to how fast a tick can go. - // If any of them are removed then it takes more than I am willing to measure. - // So lets fail the test fast if we have ben running for two long. - if self.ticks % 1000 == 0 { - assert!( - self.start.elapsed() - self.deps_time - < Duration::from_secs(self.slow_cpu_multiplier * 90) - ); - } - } - Ok(()) - } - pub fn elapsed(&mut self, dur: Duration) { - self.deps_time += dur; - } -} - -/// The preferred way to store the set of activated features for a package. -/// This is sorted so that it impls Hash, and owns its contents, -/// needed so it can be part of the key for caching in the `DepsCache`. -/// It is also cloned often as part of `Context`, hence the `RC`. -/// `im-rs::OrdSet` was slower of small sets like this, -/// but this can change with improvements to std, im, or llvm. -/// Using a consistent type for this allows us to use the highly -/// optimized comparison operators like `is_subset` at the interfaces. -pub type FeaturesSet = Rc>; - -/// Resolver behavior, used to opt-in to new behavior that is -/// backwards-incompatible via the `resolver` field in the manifest. -#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] -pub enum ResolveBehavior { - /// V1 is the original resolver behavior. - V1, - /// V2 adds the new feature resolver. - V2, -} - -impl ResolveBehavior { - pub fn from_manifest(resolver: &str) -> CargoResult { - match resolver { - "1" => Ok(ResolveBehavior::V1), - "2" => Ok(ResolveBehavior::V2), - s => anyhow::bail!( - "`resolver` setting `{}` is not valid, valid options are \"1\" or \"2\"", - s - ), - } - } - - pub fn to_manifest(&self) -> Option { - match self { - ResolveBehavior::V1 => None, - ResolveBehavior::V2 => Some("2".to_string()), - } - } -} - -/// Options for how the resolve should work. -#[derive(Clone, Debug, Eq, PartialEq, Hash)] -pub struct ResolveOpts { - /// Whether or not dev-dependencies should be included. - /// - /// This may be set to `false` by things like `cargo install` or `-Z avoid-dev-deps`. - /// It also gets set to `false` when activating dependencies in the resolver. - pub dev_deps: bool, - /// Set of features requested on the command-line. - pub features: RequestedFeatures, -} - -impl ResolveOpts { - /// Creates a ResolveOpts that resolves everything. - pub fn everything() -> ResolveOpts { - ResolveOpts { - dev_deps: true, - features: RequestedFeatures::CliFeatures(CliFeatures::new_all(true)), - } - } - - pub fn new(dev_deps: bool, features: RequestedFeatures) -> ResolveOpts { - ResolveOpts { dev_deps, features } - } -} - -#[derive(Clone)] -pub struct DepsFrame { - pub parent: Summary, - pub just_for_error_messages: bool, - pub remaining_siblings: RcVecIter, -} - -impl DepsFrame { - /// Returns the least number of candidates that any of this frame's siblings - /// has. - /// - /// The `remaining_siblings` array is already sorted with the smallest - /// number of candidates at the front, so we just return the number of - /// candidates in that entry. 
- fn min_candidates(&self) -> usize { - self.remaining_siblings - .peek() - .map(|(_, (_, candidates, _))| candidates.len()) - .unwrap_or(0) - } - - pub fn flatten(&self) -> impl Iterator + '_ { - self.remaining_siblings - .clone() - .map(move |(d, _, _)| (self.parent.package_id(), d)) - } -} - -impl PartialEq for DepsFrame { - fn eq(&self, other: &DepsFrame) -> bool { - self.just_for_error_messages == other.just_for_error_messages - && self.min_candidates() == other.min_candidates() - } -} - -impl Eq for DepsFrame {} - -impl PartialOrd for DepsFrame { - fn partial_cmp(&self, other: &DepsFrame) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for DepsFrame { - fn cmp(&self, other: &DepsFrame) -> Ordering { - self.just_for_error_messages - .cmp(&other.just_for_error_messages) - .reverse() - .then_with(|| self.min_candidates().cmp(&other.min_candidates())) - } -} - -/// Note that an `OrdSet` is used for the remaining dependencies that need -/// activation. This set is sorted by how many candidates each dependency has. -/// -/// This helps us get through super constrained portions of the dependency -/// graph quickly and hopefully lock down what later larger dependencies can -/// use (those with more candidates). -#[derive(Clone)] -pub struct RemainingDeps { - /// a monotonic counter, increased for each new insertion. - time: u32, - /// the data is augmented by the insertion time. - /// This insures that no two items will cmp eq. - /// Forcing the OrdSet into a multi set. - data: im_rc::OrdSet<(DepsFrame, u32)>, -} - -impl RemainingDeps { - pub fn new() -> RemainingDeps { - RemainingDeps { - time: 0, - data: im_rc::OrdSet::new(), - } - } - pub fn push(&mut self, x: DepsFrame) { - let insertion_time = self.time; - self.data.insert((x, insertion_time)); - self.time += 1; - } - pub fn pop_most_constrained(&mut self) -> Option<(bool, (Summary, DepInfo))> { - while let Some((mut deps_frame, insertion_time)) = self.data.remove_min() { - let just_here_for_the_error_messages = deps_frame.just_for_error_messages; - - // Figure out what our next dependency to activate is, and if nothing is - // listed then we're entirely done with this frame (yay!) and we can - // move on to the next frame. - if let Some(sibling) = deps_frame.remaining_siblings.next() { - let parent = Summary::clone(&deps_frame.parent); - self.data.insert((deps_frame, insertion_time)); - return Some((just_here_for_the_error_messages, (parent, sibling))); - } - } - None - } - pub fn iter(&mut self) -> impl Iterator + '_ { - self.data.iter().flat_map(|(other, _)| other.flatten()) - } -} - -/// Information about the dependencies for a crate, a tuple of: -/// -/// (dependency info, candidates, features activated) -pub type DepInfo = (Dependency, Rc>, FeaturesSet); - -/// All possible reasons that a package might fail to activate. -/// -/// We maintain a list of conflicts for error reporting as well as backtracking -/// purposes. Each reason here is why candidates may be rejected or why we may -/// fail to resolve a dependency. -#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)] -pub enum ConflictReason { - /// There was a semver conflict, for example we tried to activate a package - /// 1.0.2 but 1.1.0 was already activated (aka a compatible semver version - /// is already activated) - Semver, - - /// The `links` key is being violated. For example one crate in the - /// dependency graph has `links = "foo"` but this crate also had that, and - /// we're only allowed one per dependency graph. 
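`RemainingDeps` turns an ordered set into a multiset by pairing every frame with a monotonically increasing insertion counter, so frames that compare equal can still coexist and the most constrained one pops first. The sketch below shows only that pairing trick; it uses `std::collections::BTreeSet` instead of the persistent `im_rc::OrdSet` (an assumption made to keep the example dependency-free) and omits the re-insertion that `pop_most_constrained` performs.

```rust
use std::collections::BTreeSet;

/// Illustrative only: a "pop the smallest item first" queue in which equal
/// items are disambiguated by insertion time.
struct MinQueue<T: Ord> {
    time: u32,
    data: BTreeSet<(T, u32)>,
}

impl<T: Ord> MinQueue<T> {
    fn new() -> Self {
        MinQueue { time: 0, data: BTreeSet::new() }
    }

    fn push(&mut self, item: T) {
        // The counter makes every entry unique, so the set behaves as a multiset.
        self.data.insert((item, self.time));
        self.time += 1;
    }

    fn pop_min(&mut self) -> Option<T> {
        self.data.pop_first().map(|(item, _)| item)
    }
}

fn main() {
    let mut queue = MinQueue::new();
    queue.push((3, "serde"));
    queue.push((1, "log"));
    queue.push((1, "log")); // duplicates are fine
    queue.push((2, "rand"));
    assert_eq!(queue.pop_min(), Some((1, "log")));
    assert_eq!(queue.pop_min(), Some((1, "log")));
    assert_eq!(queue.pop_min(), Some((2, "rand")));
}
```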
- Links(InternedString), - - /// A dependency listed features that weren't actually available on the - /// candidate. For example we tried to activate feature `foo` but the - /// candidate we're activating didn't actually have the feature `foo`. - MissingFeatures(String), - - /// A dependency listed a feature that ended up being a required dependency. - /// For example we tried to activate feature `foo` but the - /// candidate we're activating didn't actually have the feature `foo` - /// it had a dependency `foo` instead. - RequiredDependencyAsFeature(InternedString), - - /// A dependency listed a feature for an optional dependency, but that - /// optional dependency is "hidden" using namespaced `dep:` syntax. - NonImplicitDependencyAsFeature(InternedString), - - // TODO: needs more info for `activation_error` - // TODO: needs more info for `find_candidate` - /// pub dep error - PublicDependency(PackageId), - PubliclyExports(PackageId), -} - -impl ConflictReason { - pub fn is_links(&self) -> bool { - matches!(self, ConflictReason::Links(_)) - } - - pub fn is_missing_features(&self) -> bool { - matches!(self, ConflictReason::MissingFeatures(_)) - } - - pub fn is_required_dependency_as_features(&self) -> bool { - matches!(self, ConflictReason::RequiredDependencyAsFeature(_)) - } - - pub fn is_public_dependency(&self) -> bool { - matches!( - self, - ConflictReason::PublicDependency(_) | ConflictReason::PubliclyExports(_) - ) - } -} - -/// A list of packages that have gotten in the way of resolving a dependency. -/// If resolving a dependency fails then this represents an incompatibility, -/// that dependency will never be resolve while all of these packages are active. -/// This is useless if the packages can't be simultaneously activated for other reasons. -pub type ConflictMap = BTreeMap; - -pub struct RcVecIter { - vec: Rc>, - rest: Range, -} - -impl RcVecIter { - pub fn new(vec: Rc>) -> RcVecIter { - RcVecIter { - rest: 0..vec.len(), - vec, - } - } - - fn peek(&self) -> Option<(usize, &T)> { - self.rest - .clone() - .next() - .and_then(|i| self.vec.get(i).map(|val| (i, &*val))) - } -} - -// Not derived to avoid `T: Clone` -impl Clone for RcVecIter { - fn clone(&self) -> RcVecIter { - RcVecIter { - vec: self.vec.clone(), - rest: self.rest.clone(), - } - } -} - -impl Iterator for RcVecIter -where - T: Clone, -{ - type Item = T; - - fn next(&mut self) -> Option { - self.rest.next().and_then(|i| self.vec.get(i).cloned()) - } - - fn size_hint(&self) -> (usize, Option) { - // rest is a std::ops::Range, which is an ExactSizeIterator. - self.rest.size_hint() - } -} - -impl ExactSizeIterator for RcVecIter {} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/version_prefs.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/version_prefs.rs deleted file mode 100644 index 8eb800c40..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/resolver/version_prefs.rs +++ /dev/null @@ -1,181 +0,0 @@ -//! This module implements support for preferring some versions of a package -//! over other versions. - -use std::cmp::Ordering; -use std::collections::{HashMap, HashSet}; - -use crate::core::{Dependency, PackageId, Summary}; -use crate::util::interning::InternedString; - -/// A collection of preferences for particular package versions. -/// -/// This is built up with [`Self::prefer_package_id`] and [`Self::prefer_dependency`], then used to sort the set of -/// summaries for a package during resolution via [`Self::sort_summaries`]. 
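`RcVecIter` iterates a shared `Rc<Vec<T>>` by tracking only an index range, and its hand-written `Clone` impl is what lets the iterator be cloned without requiring `T: Clone` up front; items are cloned only as they are yielded. A self-contained sketch of the same shape under a hypothetical name:

```rust
use std::ops::Range;
use std::rc::Rc;

/// Illustrative sketch: an iterator over a shared `Rc<Vec<T>>` that can be
/// cloned cheaply because only the index range is duplicated.
struct SharedVecIter<T> {
    vec: Rc<Vec<T>>,
    rest: Range<usize>,
}

impl<T> SharedVecIter<T> {
    fn new(vec: Rc<Vec<T>>) -> Self {
        SharedVecIter { rest: 0..vec.len(), vec }
    }
}

// Hand-written so that cloning the iterator does not require `T: Clone`.
impl<T> Clone for SharedVecIter<T> {
    fn clone(&self) -> Self {
        SharedVecIter { vec: Rc::clone(&self.vec), rest: self.rest.clone() }
    }
}

impl<T: Clone> Iterator for SharedVecIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        self.rest.next().and_then(|i| self.vec.get(i).cloned())
    }
}

fn main() {
    let data = Rc::new(vec!["a", "b", "c"]);
    let mut first = SharedVecIter::new(Rc::clone(&data));
    assert_eq!(first.next(), Some("a"));

    // The clone resumes from the same position and shares the backing vector.
    let second = first.clone();
    assert_eq!(second.collect::<Vec<_>>(), vec!["b", "c"]);
    assert_eq!(first.collect::<Vec<_>>(), vec!["b", "c"]);
}
```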
-/// -/// As written, a version is either "preferred" or "not preferred". Later extensions may -/// introduce more granular preferences. -#[derive(Default)] -pub struct VersionPreferences { - try_to_use: HashSet, - prefer_patch_deps: HashMap>, -} - -pub enum VersionOrdering { - MaximumVersionsFirst, - MinimumVersionsFirst, -} - -impl VersionPreferences { - /// Indicate that the given package (specified as a [`PackageId`]) should be preferred. - pub fn prefer_package_id(&mut self, pkg_id: PackageId) { - self.try_to_use.insert(pkg_id); - } - - /// Indicate that the given package (specified as a [`Dependency`]) should be preferred. - pub fn prefer_dependency(&mut self, dep: Dependency) { - self.prefer_patch_deps - .entry(dep.package_name()) - .or_insert_with(HashSet::new) - .insert(dep); - } - - /// Sort the given vector of summaries in-place, with all summaries presumed to be for - /// the same package. Preferred versions appear first in the result, sorted by - /// `version_ordering`, followed by non-preferred versions sorted the same way. - pub fn sort_summaries(&self, summaries: &mut Vec, version_ordering: VersionOrdering) { - let should_prefer = |pkg_id: &PackageId| { - self.try_to_use.contains(pkg_id) - || self - .prefer_patch_deps - .get(&pkg_id.name()) - .map(|deps| deps.iter().any(|d| d.matches_id(*pkg_id))) - .unwrap_or(false) - }; - summaries.sort_unstable_by(|a, b| { - let prefer_a = should_prefer(&a.package_id()); - let prefer_b = should_prefer(&b.package_id()); - let previous_cmp = prefer_a.cmp(&prefer_b).reverse(); - match previous_cmp { - Ordering::Equal => { - let cmp = a.version().cmp(b.version()); - match version_ordering { - VersionOrdering::MaximumVersionsFirst => cmp.reverse(), - VersionOrdering::MinimumVersionsFirst => cmp, - } - } - _ => previous_cmp, - } - }); - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::core::SourceId; - use crate::util::Config; - use std::collections::BTreeMap; - - fn pkgid(name: &str, version: &str) -> PackageId { - let src_id = - SourceId::from_url("registry+https://github.com/rust-lang/crates.io-index").unwrap(); - PackageId::new(name, version, src_id).unwrap() - } - - fn dep(name: &str, version: &str) -> Dependency { - let src_id = - SourceId::from_url("registry+https://github.com/rust-lang/crates.io-index").unwrap(); - Dependency::parse(name, Some(version), src_id).unwrap() - } - - fn summ(name: &str, version: &str) -> Summary { - let pkg_id = pkgid(name, version); - let config = Config::default().unwrap(); - let features = BTreeMap::new(); - Summary::new(&config, pkg_id, Vec::new(), &features, None::<&String>).unwrap() - } - - fn describe(summaries: &Vec) -> String { - let strs: Vec = summaries - .iter() - .map(|summary| format!("{}/{}", summary.name(), summary.version())) - .collect(); - strs.join(", ") - } - - #[test] - fn test_prefer_package_id() { - let mut vp = VersionPreferences::default(); - vp.prefer_package_id(pkgid("foo", "1.2.3")); - - let mut summaries = vec![ - summ("foo", "1.2.4"), - summ("foo", "1.2.3"), - summ("foo", "1.1.0"), - summ("foo", "1.0.9"), - ]; - - vp.sort_summaries(&mut summaries, VersionOrdering::MaximumVersionsFirst); - assert_eq!( - describe(&summaries), - "foo/1.2.3, foo/1.2.4, foo/1.1.0, foo/1.0.9".to_string() - ); - - vp.sort_summaries(&mut summaries, VersionOrdering::MinimumVersionsFirst); - assert_eq!( - describe(&summaries), - "foo/1.2.3, foo/1.0.9, foo/1.1.0, foo/1.2.4".to_string() - ); - } - - #[test] - fn test_prefer_dependency() { - let mut vp = VersionPreferences::default(); - 
vp.prefer_dependency(dep("foo", "=1.2.3")); - - let mut summaries = vec![ - summ("foo", "1.2.4"), - summ("foo", "1.2.3"), - summ("foo", "1.1.0"), - summ("foo", "1.0.9"), - ]; - - vp.sort_summaries(&mut summaries, VersionOrdering::MaximumVersionsFirst); - assert_eq!( - describe(&summaries), - "foo/1.2.3, foo/1.2.4, foo/1.1.0, foo/1.0.9".to_string() - ); - - vp.sort_summaries(&mut summaries, VersionOrdering::MinimumVersionsFirst); - assert_eq!( - describe(&summaries), - "foo/1.2.3, foo/1.0.9, foo/1.1.0, foo/1.2.4".to_string() - ); - } - - #[test] - fn test_prefer_both() { - let mut vp = VersionPreferences::default(); - vp.prefer_package_id(pkgid("foo", "1.2.3")); - vp.prefer_dependency(dep("foo", "=1.1.0")); - - let mut summaries = vec![ - summ("foo", "1.2.4"), - summ("foo", "1.2.3"), - summ("foo", "1.1.0"), - summ("foo", "1.0.9"), - ]; - - vp.sort_summaries(&mut summaries, VersionOrdering::MaximumVersionsFirst); - assert_eq!( - describe(&summaries), - "foo/1.2.3, foo/1.1.0, foo/1.2.4, foo/1.0.9".to_string() - ); - - vp.sort_summaries(&mut summaries, VersionOrdering::MinimumVersionsFirst); - assert_eq!( - describe(&summaries), - "foo/1.1.0, foo/1.2.3, foo/1.0.9, foo/1.2.4".to_string() - ); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/shell.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/shell.rs deleted file mode 100644 index 887b8967d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/shell.rs +++ /dev/null @@ -1,555 +0,0 @@ -use std::fmt; -use std::io::prelude::*; - -use termcolor::Color::{Cyan, Green, Red, Yellow}; -use termcolor::{self, Color, ColorSpec, StandardStream, WriteColor}; - -use crate::util::errors::CargoResult; - -pub enum TtyWidth { - NoTty, - Known(usize), - Guess(usize), -} - -impl TtyWidth { - /// Returns the width provided with `-Z terminal-width` to rustc to truncate diagnostics with - /// long lines. - pub fn diagnostic_terminal_width(&self) -> Option { - match *self { - TtyWidth::NoTty | TtyWidth::Guess(_) => None, - TtyWidth::Known(width) => Some(width), - } - } - - /// Returns the width used by progress bars for the tty. - pub fn progress_max_width(&self) -> Option { - match *self { - TtyWidth::NoTty => None, - TtyWidth::Known(width) | TtyWidth::Guess(width) => Some(width), - } - } -} - -/// The requested verbosity of output. -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum Verbosity { - Verbose, - Normal, - Quiet, -} - -/// An abstraction around console output that remembers preferences for output -/// verbosity and color. -pub struct Shell { - /// Wrapper around stdout/stderr. This helps with supporting sending - /// output to a memory buffer which is useful for tests. - output: ShellOut, - /// How verbose messages should be. - verbosity: Verbosity, - /// Flag that indicates the current line needs to be cleared before - /// printing. Used when a progress bar is currently displayed. - needs_clear: bool, -} - -impl fmt::Debug for Shell { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.output { - ShellOut::Write(_) => f - .debug_struct("Shell") - .field("verbosity", &self.verbosity) - .finish(), - ShellOut::Stream { color_choice, .. 
} => f - .debug_struct("Shell") - .field("verbosity", &self.verbosity) - .field("color_choice", &color_choice) - .finish(), - } - } -} - -/// A `Write`able object, either with or without color support -enum ShellOut { - /// A plain write object without color support - Write(Box), - /// Color-enabled stdio, with information on whether color should be used - Stream { - stdout: StandardStream, - stderr: StandardStream, - stderr_tty: bool, - color_choice: ColorChoice, - }, -} - -/// Whether messages should use color output -#[derive(Debug, PartialEq, Clone, Copy)] -pub enum ColorChoice { - /// Force color output - Always, - /// Force disable color output - Never, - /// Intelligently guess whether to use color output - CargoAuto, -} - -impl Shell { - /// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose - /// output. - pub fn new() -> Shell { - let auto_clr = ColorChoice::CargoAuto; - Shell { - output: ShellOut::Stream { - stdout: StandardStream::stdout( - auto_clr.to_termcolor_color_choice(atty::Stream::Stdout), - ), - stderr: StandardStream::stderr( - auto_clr.to_termcolor_color_choice(atty::Stream::Stderr), - ), - color_choice: ColorChoice::CargoAuto, - stderr_tty: atty::is(atty::Stream::Stderr), - }, - verbosity: Verbosity::Verbose, - needs_clear: false, - } - } - - /// Creates a shell from a plain writable object, with no color, and max verbosity. - pub fn from_write(out: Box) -> Shell { - Shell { - output: ShellOut::Write(out), - verbosity: Verbosity::Verbose, - needs_clear: false, - } - } - - /// Prints a message, where the status will have `color` color, and can be justified. The - /// messages follows without color. - fn print( - &mut self, - status: &dyn fmt::Display, - message: Option<&dyn fmt::Display>, - color: Color, - justified: bool, - ) -> CargoResult<()> { - match self.verbosity { - Verbosity::Quiet => Ok(()), - _ => { - if self.needs_clear { - self.err_erase_line(); - } - self.output - .message_stderr(status, message, color, justified) - } - } - } - - /// Sets whether the next print should clear the current line. - pub fn set_needs_clear(&mut self, needs_clear: bool) { - self.needs_clear = needs_clear; - } - - /// Returns `true` if the `needs_clear` flag is unset. - pub fn is_cleared(&self) -> bool { - !self.needs_clear - } - - /// Returns the width of the terminal in spaces, if any. - pub fn err_width(&self) -> TtyWidth { - match self.output { - ShellOut::Stream { - stderr_tty: true, .. - } => imp::stderr_width(), - _ => TtyWidth::NoTty, - } - } - - /// Returns `true` if stderr is a tty. - pub fn is_err_tty(&self) -> bool { - match self.output { - ShellOut::Stream { stderr_tty, .. } => stderr_tty, - _ => false, - } - } - - /// Gets a reference to the underlying stdout writer. - pub fn out(&mut self) -> &mut dyn Write { - if self.needs_clear { - self.err_erase_line(); - } - self.output.stdout() - } - - /// Gets a reference to the underlying stderr writer. - pub fn err(&mut self) -> &mut dyn Write { - if self.needs_clear { - self.err_erase_line(); - } - self.output.stderr() - } - - /// Erase from cursor to end of line. - pub fn err_erase_line(&mut self) { - if self.err_supports_color() { - imp::err_erase_line(self); - self.needs_clear = false; - } - } - - /// Shortcut to right-align and color green a status message. 
- pub fn status(&mut self, status: T, message: U) -> CargoResult<()> - where - T: fmt::Display, - U: fmt::Display, - { - self.print(&status, Some(&message), Green, true) - } - - pub fn status_header(&mut self, status: T) -> CargoResult<()> - where - T: fmt::Display, - { - self.print(&status, None, Cyan, true) - } - - /// Shortcut to right-align a status message. - pub fn status_with_color( - &mut self, - status: T, - message: U, - color: Color, - ) -> CargoResult<()> - where - T: fmt::Display, - U: fmt::Display, - { - self.print(&status, Some(&message), color, true) - } - - /// Runs the callback only if we are in verbose mode. - pub fn verbose(&mut self, mut callback: F) -> CargoResult<()> - where - F: FnMut(&mut Shell) -> CargoResult<()>, - { - match self.verbosity { - Verbosity::Verbose => callback(self), - _ => Ok(()), - } - } - - /// Runs the callback if we are not in verbose mode. - pub fn concise(&mut self, mut callback: F) -> CargoResult<()> - where - F: FnMut(&mut Shell) -> CargoResult<()>, - { - match self.verbosity { - Verbosity::Verbose => Ok(()), - _ => callback(self), - } - } - - /// Prints a red 'error' message. - pub fn error(&mut self, message: T) -> CargoResult<()> { - if self.needs_clear { - self.err_erase_line(); - } - self.output - .message_stderr(&"error", Some(&message), Red, false) - } - - /// Prints an amber 'warning' message. - pub fn warn(&mut self, message: T) -> CargoResult<()> { - match self.verbosity { - Verbosity::Quiet => Ok(()), - _ => self.print(&"warning", Some(&message), Yellow, false), - } - } - - /// Prints a cyan 'note' message. - pub fn note(&mut self, message: T) -> CargoResult<()> { - self.print(&"note", Some(&message), Cyan, false) - } - - /// Updates the verbosity of the shell. - pub fn set_verbosity(&mut self, verbosity: Verbosity) { - self.verbosity = verbosity; - } - - /// Gets the verbosity of the shell. - pub fn verbosity(&self) -> Verbosity { - self.verbosity - } - - /// Updates the color choice (always, never, or auto) from a string.. - pub fn set_color_choice(&mut self, color: Option<&str>) -> CargoResult<()> { - if let ShellOut::Stream { - ref mut stdout, - ref mut stderr, - ref mut color_choice, - .. - } = self.output - { - let cfg = match color { - Some("always") => ColorChoice::Always, - Some("never") => ColorChoice::Never, - - Some("auto") | None => ColorChoice::CargoAuto, - - Some(arg) => anyhow::bail!( - "argument for --color must be auto, always, or \ - never, but found `{}`", - arg - ), - }; - *color_choice = cfg; - *stdout = StandardStream::stdout(cfg.to_termcolor_color_choice(atty::Stream::Stdout)); - *stderr = StandardStream::stderr(cfg.to_termcolor_color_choice(atty::Stream::Stderr)); - } - Ok(()) - } - - /// Gets the current color choice. - /// - /// If we are not using a color stream, this will always return `Never`, even if the color - /// choice has been set to something else. - pub fn color_choice(&self) -> ColorChoice { - match self.output { - ShellOut::Stream { color_choice, .. } => color_choice, - ShellOut::Write(_) => ColorChoice::Never, - } - } - - /// Whether the shell supports color. - pub fn err_supports_color(&self) -> bool { - match &self.output { - ShellOut::Write(_) => false, - ShellOut::Stream { stderr, .. } => stderr.supports_color(), - } - } - - pub fn out_supports_color(&self) -> bool { - match &self.output { - ShellOut::Write(_) => false, - ShellOut::Stream { stdout, .. } => stdout.supports_color(), - } - } - - /// Prints a message to stderr and translates ANSI escape code into console colors. 
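`Shell::status` and its siblings print a bold, colored, right-aligned status word followed by a plain message. The sketch below reproduces just that presentation with the `termcolor` crate (which the code above already depends on); verbosity gating, line clearing, and the buffered-writer variant are omitted, and the column width shown is only an assumption for the example.

```rust
use std::io::Write;
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

/// Illustrative sketch: print `    Status message` with the status word bold,
/// green, and right-aligned in a 12-character column.
fn print_status(status: &str, message: &str) -> std::io::Result<()> {
    let mut stderr = StandardStream::stderr(ColorChoice::Auto);
    stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(Color::Green)))?;
    write!(stderr, "{:>12}", status)?;
    stderr.reset()?;
    writeln!(stderr, " {}", message)?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    print_status("Resolving", "dependency graph...")?;
    print_status("Finished", "illustrative example")
}
```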
- pub fn print_ansi_stderr(&mut self, message: &[u8]) -> CargoResult<()> { - if self.needs_clear { - self.err_erase_line(); - } - #[cfg(windows)] - { - if let ShellOut::Stream { stderr, .. } = &mut self.output { - ::fwdansi::write_ansi(stderr, message)?; - return Ok(()); - } - } - self.err().write_all(message)?; - Ok(()) - } - - /// Prints a message to stdout and translates ANSI escape code into console colors. - pub fn print_ansi_stdout(&mut self, message: &[u8]) -> CargoResult<()> { - if self.needs_clear { - self.err_erase_line(); - } - #[cfg(windows)] - { - if let ShellOut::Stream { stdout, .. } = &mut self.output { - ::fwdansi::write_ansi(stdout, message)?; - return Ok(()); - } - } - self.out().write_all(message)?; - Ok(()) - } - - pub fn print_json(&mut self, obj: &T) -> CargoResult<()> { - // Path may fail to serialize to JSON ... - let encoded = serde_json::to_string(&obj)?; - // ... but don't fail due to a closed pipe. - drop(writeln!(self.out(), "{}", encoded)); - Ok(()) - } -} - -impl Default for Shell { - fn default() -> Self { - Self::new() - } -} - -impl ShellOut { - /// Prints out a message with a status. The status comes first, and is bold plus the given - /// color. The status can be justified, in which case the max width that will right align is - /// 12 chars. - fn message_stderr( - &mut self, - status: &dyn fmt::Display, - message: Option<&dyn fmt::Display>, - color: Color, - justified: bool, - ) -> CargoResult<()> { - match *self { - ShellOut::Stream { ref mut stderr, .. } => { - stderr.reset()?; - stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)))?; - if justified { - write!(stderr, "{:>12}", status)?; - } else { - write!(stderr, "{}", status)?; - stderr.set_color(ColorSpec::new().set_bold(true))?; - write!(stderr, ":")?; - } - stderr.reset()?; - match message { - Some(message) => writeln!(stderr, " {}", message)?, - None => write!(stderr, " ")?, - } - } - ShellOut::Write(ref mut w) => { - if justified { - write!(w, "{:>12}", status)?; - } else { - write!(w, "{}:", status)?; - } - match message { - Some(message) => writeln!(w, " {}", message)?, - None => write!(w, " ")?, - } - } - } - Ok(()) - } - - /// Gets stdout as a `io::Write`. - fn stdout(&mut self) -> &mut dyn Write { - match *self { - ShellOut::Stream { ref mut stdout, .. } => stdout, - ShellOut::Write(ref mut w) => w, - } - } - - /// Gets stderr as a `io::Write`. - fn stderr(&mut self) -> &mut dyn Write { - match *self { - ShellOut::Stream { ref mut stderr, .. } => stderr, - ShellOut::Write(ref mut w) => w, - } - } -} - -impl ColorChoice { - /// Converts our color choice to termcolor's version. - fn to_termcolor_color_choice(self, stream: atty::Stream) -> termcolor::ColorChoice { - match self { - ColorChoice::Always => termcolor::ColorChoice::Always, - ColorChoice::Never => termcolor::ColorChoice::Never, - ColorChoice::CargoAuto => { - if atty::is(stream) { - termcolor::ColorChoice::Auto - } else { - termcolor::ColorChoice::Never - } - } - } - } -} - -#[cfg(unix)] -mod imp { - use super::{Shell, TtyWidth}; - use std::mem; - - pub fn stderr_width() -> TtyWidth { - unsafe { - let mut winsize: libc::winsize = mem::zeroed(); - // The .into() here is needed for FreeBSD which defines TIOCGWINSZ - // as c_uint but ioctl wants c_ulong. 
- if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ.into(), &mut winsize) < 0 { - return TtyWidth::NoTty; - } - if winsize.ws_col > 0 { - TtyWidth::Known(winsize.ws_col as usize) - } else { - TtyWidth::NoTty - } - } - } - - pub fn err_erase_line(shell: &mut Shell) { - // This is the "EL - Erase in Line" sequence. It clears from the cursor - // to the end of line. - // https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences - let _ = shell.output.stderr().write_all(b"\x1B[K"); - } -} - -#[cfg(windows)] -mod imp { - use std::{cmp, mem, ptr}; - use winapi::um::fileapi::*; - use winapi::um::handleapi::*; - use winapi::um::processenv::*; - use winapi::um::winbase::*; - use winapi::um::wincon::*; - use winapi::um::winnt::*; - - pub(super) use super::{default_err_erase_line as err_erase_line, TtyWidth}; - - pub fn stderr_width() -> TtyWidth { - unsafe { - let stdout = GetStdHandle(STD_ERROR_HANDLE); - let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); - if GetConsoleScreenBufferInfo(stdout, &mut csbi) != 0 { - return TtyWidth::Known((csbi.srWindow.Right - csbi.srWindow.Left) as usize); - } - - // On mintty/msys/cygwin based terminals, the above fails with - // INVALID_HANDLE_VALUE. Use an alternate method which works - // in that case as well. - let h = CreateFileA( - "CONOUT$\0".as_ptr() as *const CHAR, - GENERIC_READ | GENERIC_WRITE, - FILE_SHARE_READ | FILE_SHARE_WRITE, - ptr::null_mut(), - OPEN_EXISTING, - 0, - ptr::null_mut(), - ); - if h == INVALID_HANDLE_VALUE { - return TtyWidth::NoTty; - } - - let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); - let rc = GetConsoleScreenBufferInfo(h, &mut csbi); - CloseHandle(h); - if rc != 0 { - let width = (csbi.srWindow.Right - csbi.srWindow.Left) as usize; - // Unfortunately cygwin/mintty does not set the size of the - // backing console to match the actual window size. This - // always reports a size of 80 or 120 (not sure what - // determines that). Use a conservative max of 60 which should - // work in most circumstances. ConEmu does some magic to - // resize the console correctly, but there's no reasonable way - // to detect which kind of terminal we are running in, or if - // GetConsoleScreenBufferInfo returns accurate information. - return TtyWidth::Guess(cmp::min(60, width)); - } - - TtyWidth::NoTty - } - } -} - -#[cfg(windows)] -fn default_err_erase_line(shell: &mut Shell) { - match imp::stderr_width() { - TtyWidth::Known(max_width) | TtyWidth::Guess(max_width) => { - let blank = " ".repeat(max_width); - drop(write!(shell.output.stderr(), "{}\r", blank)); - } - _ => (), - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/source/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/source/mod.rs deleted file mode 100644 index 81009ea65..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/source/mod.rs +++ /dev/null @@ -1,321 +0,0 @@ -use std::collections::hash_map::HashMap; -use std::fmt; - -use crate::core::package::PackageSet; -use crate::core::{Dependency, Package, PackageId, Summary}; -use crate::util::{CargoResult, Config}; - -mod source_id; - -pub use self::source_id::{GitReference, SourceId}; - -/// Something that finds and downloads remote packages based on names and versions. -pub trait Source { - /// Returns the `SourceId` corresponding to this source. - fn source_id(&self) -> SourceId; - - /// Returns the replaced `SourceId` corresponding to this source. 
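`err_erase_line` relies on the ANSI "Erase in Line" sequence. A small std-only sketch of the same progress-line trick; it only has a visible effect on ANSI-capable terminals.

use std::io::{self, Write};

fn main() -> io::Result<()> {
    let mut stderr = io::stderr();
    write!(stderr, "downloading registry index...")?;
    stderr.flush()?;
    // "\r" returns the cursor to column 0, then "\x1B[K" is the ANSI
    // "Erase in Line" (EL) sequence used by `err_erase_line`: it clears
    // from the cursor to the end of the line.
    write!(stderr, "\r\x1B[K")?;
    writeln!(stderr, "Downloaded registry index")?;
    Ok(())
}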
- fn replaced_source_id(&self) -> SourceId { - self.source_id() - } - - /// Returns whether or not this source will return summaries with - /// checksums listed. - fn supports_checksums(&self) -> bool; - - /// Returns whether or not this source will return summaries with - /// the `precise` field in the source id listed. - fn requires_precise(&self) -> bool; - - /// Attempts to find the packages that match a dependency request. - fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()>; - - /// Attempts to find the packages that are close to a dependency request. - /// Each source gets to define what `close` means for it. - /// Path/Git sources may return all dependencies that are at that URI, - /// whereas an `Index` source may return dependencies that have the same canonicalization. - fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()>; - - fn query_vec(&mut self, dep: &Dependency) -> CargoResult> { - let mut ret = Vec::new(); - self.query(dep, &mut |s| ret.push(s))?; - Ok(ret) - } - - /// Performs any network operations required to get the entire list of all names, - /// versions and dependencies of packages managed by the `Source`. - fn update(&mut self) -> CargoResult<()>; - - /// Fetches the full package for each name and version specified. - fn download(&mut self, package: PackageId) -> CargoResult; - - fn download_now(self: Box, package: PackageId, config: &Config) -> CargoResult - where - Self: std::marker::Sized, - { - let mut sources = SourceMap::new(); - sources.insert(self); - let pkg_set = PackageSet::new(&[package], sources, config)?; - let pkg = pkg_set.get_one(package)?; - Ok(Package::clone(pkg)) - } - - fn finish_download(&mut self, package: PackageId, contents: Vec) -> CargoResult; - - /// Generates a unique string which represents the fingerprint of the - /// current state of the source. - /// - /// This fingerprint is used to determine the "fresheness" of the source - /// later on. It must be guaranteed that the fingerprint of a source is - /// constant if and only if the output product will remain constant. - /// - /// The `pkg` argument is the package which this fingerprint should only be - /// interested in for when this source may contain multiple packages. - fn fingerprint(&self, pkg: &Package) -> CargoResult; - - /// If this source supports it, verifies the source of the package - /// specified. - /// - /// Note that the source may also have performed other checksum-based - /// verification during the `download` step, but this is intended to be run - /// just before a crate is compiled so it may perform more expensive checks - /// which may not be cacheable. - fn verify(&self, _pkg: PackageId) -> CargoResult<()> { - Ok(()) - } - - /// Describes this source in a human readable fashion, used for display in - /// resolver error messages currently. - fn describe(&self) -> String; - - /// Returns whether a source is being replaced by another here. - fn is_replaced(&self) -> bool { - false - } - - /// Add a number of crates that should be whitelisted for showing up during - /// queries, even if they are yanked. Currently only applies to registry - /// sources. - fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]); - - /// Query if a package is yanked. Only registry sources can mark packages - /// as yanked. This ignores the yanked whitelist. 
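`query_vec` above is a default trait method built on top of the callback-based `query`. A stripped-down analogue of that pair; `Lookup` and `Fixed` are illustrative names, not cargo's API.

// The required method pushes results through a callback; the default
// method collects them into a Vec, just like `Source::query_vec`.
trait Lookup {
    fn query(&mut self, name: &str, f: &mut dyn FnMut(String));

    fn query_vec(&mut self, name: &str) -> Vec<String> {
        let mut out = Vec::new();
        self.query(name, &mut |s| out.push(s));
        out
    }
}

struct Fixed(Vec<String>);

impl Lookup for Fixed {
    fn query(&mut self, name: &str, f: &mut dyn FnMut(String)) {
        for s in self.0.iter().filter(|s| s.starts_with(name)) {
            f(s.clone());
        }
    }
}

fn main() {
    let mut src = Fixed(vec!["serde".into(), "serde_json".into(), "rand".into()]);
    assert_eq!(src.query_vec("serde").len(), 2);
}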
- fn is_yanked(&mut self, _pkg: PackageId) -> CargoResult; -} - -pub enum MaybePackage { - Ready(Package), - Download { url: String, descriptor: String }, -} - -impl<'a, T: Source + ?Sized + 'a> Source for Box { - /// Forwards to `Source::source_id`. - fn source_id(&self) -> SourceId { - (**self).source_id() - } - - /// Forwards to `Source::replaced_source_id`. - fn replaced_source_id(&self) -> SourceId { - (**self).replaced_source_id() - } - - /// Forwards to `Source::supports_checksums`. - fn supports_checksums(&self) -> bool { - (**self).supports_checksums() - } - - /// Forwards to `Source::requires_precise`. - fn requires_precise(&self) -> bool { - (**self).requires_precise() - } - - /// Forwards to `Source::query`. - fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - (**self).query(dep, f) - } - - /// Forwards to `Source::query`. - fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - (**self).fuzzy_query(dep, f) - } - - /// Forwards to `Source::update`. - fn update(&mut self) -> CargoResult<()> { - (**self).update() - } - - /// Forwards to `Source::download`. - fn download(&mut self, id: PackageId) -> CargoResult { - (**self).download(id) - } - - fn finish_download(&mut self, id: PackageId, data: Vec) -> CargoResult { - (**self).finish_download(id, data) - } - - /// Forwards to `Source::fingerprint`. - fn fingerprint(&self, pkg: &Package) -> CargoResult { - (**self).fingerprint(pkg) - } - - /// Forwards to `Source::verify`. - fn verify(&self, pkg: PackageId) -> CargoResult<()> { - (**self).verify(pkg) - } - - fn describe(&self) -> String { - (**self).describe() - } - - fn is_replaced(&self) -> bool { - (**self).is_replaced() - } - - fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) { - (**self).add_to_yanked_whitelist(pkgs); - } - - fn is_yanked(&mut self, pkg: PackageId) -> CargoResult { - (**self).is_yanked(pkg) - } -} - -impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T { - fn source_id(&self) -> SourceId { - (**self).source_id() - } - - fn replaced_source_id(&self) -> SourceId { - (**self).replaced_source_id() - } - - fn supports_checksums(&self) -> bool { - (**self).supports_checksums() - } - - fn requires_precise(&self) -> bool { - (**self).requires_precise() - } - - fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - (**self).query(dep, f) - } - - fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - (**self).fuzzy_query(dep, f) - } - - fn update(&mut self) -> CargoResult<()> { - (**self).update() - } - - fn download(&mut self, id: PackageId) -> CargoResult { - (**self).download(id) - } - - fn finish_download(&mut self, id: PackageId, data: Vec) -> CargoResult { - (**self).finish_download(id, data) - } - - fn fingerprint(&self, pkg: &Package) -> CargoResult { - (**self).fingerprint(pkg) - } - - fn verify(&self, pkg: PackageId) -> CargoResult<()> { - (**self).verify(pkg) - } - - fn describe(&self) -> String { - (**self).describe() - } - - fn is_replaced(&self) -> bool { - (**self).is_replaced() - } - - fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) { - (**self).add_to_yanked_whitelist(pkgs); - } - - fn is_yanked(&mut self, pkg: PackageId) -> CargoResult { - (**self).is_yanked(pkg) - } -} - -/// A `HashMap` of `SourceId` -> `Box`. -#[derive(Default)] -pub struct SourceMap<'src> { - map: HashMap>, -} - -// `impl Debug` on source requires specialization, if even desirable at all. 
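The blanket impls above exist so that code generic over `S: Source` also accepts boxed trait objects and mutable borrows. A minimal illustration of that forwarding pattern with a made-up `Describe` trait.

trait Describe {
    fn describe(&self) -> String;
}

// Forward through a Box so `Box<dyn Describe>` is itself `Describe`.
impl<T: Describe + ?Sized> Describe for Box<T> {
    fn describe(&self) -> String {
        (**self).describe()
    }
}

// Forward through a mutable borrow for the same reason.
impl<'a, T: Describe + ?Sized> Describe for &'a mut T {
    fn describe(&self) -> String {
        (**self).describe()
    }
}

struct Registry;
impl Describe for Registry {
    fn describe(&self) -> String { "registry `crates-io`".to_string() }
}

fn announce<S: Describe>(s: S) -> String { s.describe() }

fn main() {
    let boxed: Box<dyn Describe> = Box::new(Registry);
    assert_eq!(announce(boxed), "registry `crates-io`");
    let mut r = Registry;
    assert_eq!(announce(&mut r), "registry `crates-io`");
}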
-impl<'src> fmt::Debug for SourceMap<'src> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "SourceMap ")?; - f.debug_set().entries(self.map.keys()).finish() - } -} - -impl<'src> SourceMap<'src> { - /// Creates an empty map. - pub fn new() -> SourceMap<'src> { - SourceMap { - map: HashMap::new(), - } - } - - /// Like `HashMap::contains_key`. - pub fn contains(&self, id: SourceId) -> bool { - self.map.contains_key(&id) - } - - /// Like `HashMap::get`. - pub fn get(&self, id: SourceId) -> Option<&(dyn Source + 'src)> { - self.map.get(&id).map(|s| s.as_ref()) - } - - /// Like `HashMap::get_mut`. - pub fn get_mut(&mut self, id: SourceId) -> Option<&mut (dyn Source + 'src)> { - self.map.get_mut(&id).map(|s| s.as_mut()) - } - - /// Like `HashMap::get`, but first calculates the `SourceId` from a `PackageId`. - pub fn get_by_package_id(&self, pkg_id: PackageId) -> Option<&(dyn Source + 'src)> { - self.get(pkg_id.source_id()) - } - - /// Like `HashMap::insert`, but derives the `SourceId` key from the `Source`. - pub fn insert(&mut self, source: Box) { - let id = source.source_id(); - self.map.insert(id, source); - } - - /// Like `HashMap::is_empty`. - pub fn is_empty(&self) -> bool { - self.map.is_empty() - } - - /// Like `HashMap::len`. - pub fn len(&self) -> usize { - self.map.len() - } - - /// Like `HashMap::values`. - pub fn sources<'a>(&'a self) -> impl Iterator> { - self.map.values() - } - - /// Like `HashMap::iter_mut`. - pub fn sources_mut<'a>( - &'a mut self, - ) -> impl Iterator { - self.map.iter_mut().map(|(a, b)| (a, &mut **b)) - } - - /// Merge the given map into self. - pub fn add_source_map(&mut self, other: SourceMap<'src>) { - for (key, value) in other.map { - self.map.entry(key).or_insert(value); - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/source/source_id.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/source/source_id.rs deleted file mode 100644 index 2f95e10e8..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/source/source_id.rs +++ /dev/null @@ -1,726 +0,0 @@ -use crate::core::PackageId; -use crate::sources::{DirectorySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; -use crate::sources::{GitSource, PathSource, RegistrySource}; -use crate::util::{CanonicalUrl, CargoResult, Config, IntoUrl}; -use log::trace; -use serde::de; -use serde::ser; -use std::cmp::{self, Ordering}; -use std::collections::HashSet; -use std::fmt::{self, Formatter}; -use std::hash::{self, Hash}; -use std::path::{Path, PathBuf}; -use std::ptr; -use std::sync::Mutex; -use url::Url; - -lazy_static::lazy_static! { - static ref SOURCE_ID_CACHE: Mutex> = Default::default(); -} - -/// Unique identifier for a source of packages. -#[derive(Clone, Copy, Eq, Debug)] -pub struct SourceId { - inner: &'static SourceIdInner, -} - -#[derive(Eq, Clone, Debug)] -struct SourceIdInner { - /// The source URL. - url: Url, - /// The canonical version of the above url - canonical_url: CanonicalUrl, - /// The source kind. - kind: SourceKind, - /// For example, the exact Git revision of the specified branch for a Git Source. - precise: Option, - /// Name of the registry source for alternative registries - /// WARNING: this is not always set for alt-registries when the name is - /// not known. - name: Option, -} - -/// The possible kinds of code source. Along with `SourceIdInner`, this fully defines the -/// source. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -enum SourceKind { - /// A git repository. 
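`SourceId::wrap` above interns every `SourceIdInner` behind a `&'static` reference, so copies are cheap and pointer comparisons are meaningful. A simplified, std-only sketch of that interning scheme (the real code uses `lazy_static` and a `HashSet` of `&'static SourceIdInner`; here plain strings stand in for the inner value).

use std::collections::HashSet;
use std::sync::Mutex;

static CACHE: Mutex<Option<HashSet<&'static str>>> = Mutex::new(None);

fn intern(s: &str) -> &'static str {
    let mut guard = CACHE.lock().unwrap();
    let cache = guard.get_or_insert_with(HashSet::new);
    if let Some(&hit) = cache.get(s) {
        // Already interned: hand back the same leaked reference.
        return hit;
    }
    // First sighting: leak one allocation and remember it.
    let leaked: &'static str = Box::leak(s.to_owned().into_boxed_str());
    cache.insert(leaked);
    leaked
}

fn main() {
    let a = intern("registry+https://github.com/rust-lang/crates.io-index");
    let b = intern("registry+https://github.com/rust-lang/crates.io-index");
    assert!(std::ptr::eq(a, b)); // same leaked allocation
}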
- Git(GitReference), - /// A local path. - Path, - /// A remote registry. - Registry, - /// A local filesystem-based registry. - LocalRegistry, - /// A directory-based registry. - Directory, -} - -/// Information to find a specific commit in a Git repository. -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum GitReference { - /// From a tag. - Tag(String), - /// From a branch. - Branch(String), - /// From a specific revision. - Rev(String), - /// The default branch of the repository, the reference named `HEAD`. - DefaultBranch, -} - -impl SourceId { - /// Creates a `SourceId` object from the kind and URL. - /// - /// The canonical url will be calculated, but the precise field will not - fn new(kind: SourceKind, url: Url, name: Option<&str>) -> CargoResult { - let source_id = SourceId::wrap(SourceIdInner { - kind, - canonical_url: CanonicalUrl::new(&url)?, - url, - precise: None, - name: name.map(|n| n.into()), - }); - Ok(source_id) - } - - fn wrap(inner: SourceIdInner) -> SourceId { - let mut cache = SOURCE_ID_CACHE.lock().unwrap(); - let inner = cache.get(&inner).cloned().unwrap_or_else(|| { - let inner = Box::leak(Box::new(inner)); - cache.insert(inner); - inner - }); - SourceId { inner } - } - - /// Parses a source URL and returns the corresponding ID. - /// - /// ## Example - /// - /// ``` - /// use cargo::core::SourceId; - /// SourceId::from_url("git+https://github.com/alexcrichton/\ - /// libssh2-static-sys#80e71a3021618eb05\ - /// 656c58fb7c5ef5f12bc747f"); - /// ``` - pub fn from_url(string: &str) -> CargoResult { - let mut parts = string.splitn(2, '+'); - let kind = parts.next().unwrap(); - let url = parts - .next() - .ok_or_else(|| anyhow::format_err!("invalid source `{}`", string))?; - - match kind { - "git" => { - let mut url = url.into_url()?; - let mut reference = GitReference::DefaultBranch; - for (k, v) in url.query_pairs() { - match &k[..] { - // Map older 'ref' to branch. - "branch" | "ref" => reference = GitReference::Branch(v.into_owned()), - - "rev" => reference = GitReference::Rev(v.into_owned()), - "tag" => reference = GitReference::Tag(v.into_owned()), - _ => {} - } - } - let precise = url.fragment().map(|s| s.to_owned()); - url.set_fragment(None); - url.set_query(None); - Ok(SourceId::for_git(&url, reference)?.with_precise(precise)) - } - "registry" => { - let url = url.into_url()?; - Ok(SourceId::new(SourceKind::Registry, url, None)? - .with_precise(Some("locked".to_string()))) - } - "path" => { - let url = url.into_url()?; - SourceId::new(SourceKind::Path, url, None) - } - kind => Err(anyhow::format_err!("unsupported source protocol: {}", kind)), - } - } - - /// A view of the `SourceId` that can be `Display`ed as a URL. - pub fn as_url(&self) -> SourceIdAsUrl<'_> { - SourceIdAsUrl { - inner: &*self.inner, - } - } - - /// Creates a `SourceId` from a filesystem path. - /// - /// `path`: an absolute path. - pub fn for_path(path: &Path) -> CargoResult { - let url = path.into_url()?; - SourceId::new(SourceKind::Path, url, None) - } - - /// Creates a `SourceId` from a Git reference. - pub fn for_git(url: &Url, reference: GitReference) -> CargoResult { - SourceId::new(SourceKind::Git(reference), url.clone(), None) - } - - /// Creates a SourceId from a remote registry URL when the registry name - /// cannot be determined, e.g. a user passes `--index` directly from CLI. - /// - /// Use [`SourceId::for_alt_registry`] if a name can provided, which - /// generates better messages for cargo. 
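`SourceId::from_url` above parses a `kind+url?query#fragment` string, mapping the query to a git reference and the fragment to the precise revision. A rough std-only sketch of the git case (the real code uses the `url` crate; `GitRef` and `parse_git_source` are illustrative names).

#[derive(Debug, PartialEq)]
enum GitRef { DefaultBranch, Branch(String), Tag(String), Rev(String) }

fn parse_git_source(spec: &str) -> Option<(String, GitRef, Option<String>)> {
    let rest = spec.strip_prefix("git+")?;
    // The fragment, if any, is the "precise" revision.
    let (rest, precise) = match rest.split_once('#') {
        Some((r, frag)) => (r, Some(frag.to_string())),
        None => (rest, None),
    };
    // A single query pair selects the branch/tag/rev; default branch otherwise.
    let (url, reference) = match rest.split_once('?') {
        Some((u, query)) => {
            let r = match query.split_once('=') {
                Some(("branch", v)) | Some(("ref", v)) => GitRef::Branch(v.to_string()),
                Some(("tag", v)) => GitRef::Tag(v.to_string()),
                Some(("rev", v)) => GitRef::Rev(v.to_string()),
                _ => GitRef::DefaultBranch,
            };
            (u, r)
        }
        None => (rest, GitRef::DefaultBranch),
    };
    Some((url.to_string(), reference, precise))
}

fn main() {
    let (url, reference, precise) =
        parse_git_source("git+https://github.com/rust-lang/cargo?branch=master#0123abcd").unwrap();
    assert_eq!(url, "https://github.com/rust-lang/cargo");
    assert_eq!(reference, GitRef::Branch("master".into()));
    assert_eq!(precise.as_deref(), Some("0123abcd"));
}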
- pub fn for_registry(url: &Url) -> CargoResult { - SourceId::new(SourceKind::Registry, url.clone(), None) - } - - /// Creates a `SourceId` from a remote registry URL with given name. - pub fn for_alt_registry(url: &Url, name: &str) -> CargoResult { - SourceId::new(SourceKind::Registry, url.clone(), Some(name)) - } - - /// Creates a SourceId from a local registry path. - pub fn for_local_registry(path: &Path) -> CargoResult { - let url = path.into_url()?; - SourceId::new(SourceKind::LocalRegistry, url, None) - } - - /// Creates a `SourceId` from a directory path. - pub fn for_directory(path: &Path) -> CargoResult { - let url = path.into_url()?; - SourceId::new(SourceKind::Directory, url, None) - } - - /// Returns the `SourceId` corresponding to the main repository. - /// - /// This is the main cargo registry by default, but it can be overridden in - /// a `.cargo/config.toml`. - pub fn crates_io(config: &Config) -> CargoResult { - config.crates_io_source_id(|| { - config.check_registry_index_not_set()?; - let url = CRATES_IO_INDEX.into_url().unwrap(); - SourceId::new(SourceKind::Registry, url, Some(CRATES_IO_REGISTRY)) - }) - } - - /// Gets the `SourceId` associated with given name of the remote regsitry. - pub fn alt_registry(config: &Config, key: &str) -> CargoResult { - let url = config.get_registry_index(key)?; - Ok(SourceId::wrap(SourceIdInner { - kind: SourceKind::Registry, - canonical_url: CanonicalUrl::new(&url)?, - url, - precise: None, - name: Some(key.to_string()), - })) - } - - /// Gets this source URL. - pub fn url(&self) -> &Url { - &self.inner.url - } - - /// Gets the canonical URL of this source, used for internal comparison - /// purposes. - pub fn canonical_url(&self) -> &CanonicalUrl { - &self.inner.canonical_url - } - - pub fn display_index(self) -> String { - if self.is_default_registry() { - format!("{} index", CRATES_IO_DOMAIN) - } else { - format!("`{}` index", self.display_registry_name()) - } - } - - pub fn display_registry_name(self) -> String { - if self.is_default_registry() { - CRATES_IO_REGISTRY.to_string() - } else if let Some(name) = &self.inner.name { - name.clone() - } else if self.precise().is_some() { - // We remove `precise` here to retrieve an permissive version of - // `SourceIdInner`, which may contain the registry name. - self.with_precise(None).display_registry_name() - } else { - url_display(self.url()) - } - } - - /// Returns `true` if this source is from a filesystem path. - pub fn is_path(self) -> bool { - self.inner.kind == SourceKind::Path - } - - /// Returns the local path if this is a path dependency. - pub fn local_path(self) -> Option { - if self.inner.kind != SourceKind::Path { - return None; - } - - Some(self.inner.url.to_file_path().unwrap()) - } - - /// Returns `true` if this source is from a registry (either local or not). - pub fn is_registry(self) -> bool { - matches!( - self.inner.kind, - SourceKind::Registry | SourceKind::LocalRegistry - ) - } - - /// Returns `true` if this source is a "remote" registry. - /// - /// "remote" may also mean a file URL to a git index, so it is not - /// necessarily "remote". This just means it is not `local-registry`. - pub fn is_remote_registry(self) -> bool { - matches!(self.inner.kind, SourceKind::Registry) - } - - /// Returns `true` if this source from a Git repository. - pub fn is_git(self) -> bool { - matches!(self.inner.kind, SourceKind::Git(_)) - } - - /// Creates an implementation of `Source` corresponding to this ID. 
- pub fn load<'a>( - self, - config: &'a Config, - yanked_whitelist: &HashSet, - ) -> CargoResult> { - trace!("loading SourceId; {}", self); - match self.inner.kind { - SourceKind::Git(..) => Ok(Box::new(GitSource::new(self, config)?)), - SourceKind::Path => { - let path = match self.inner.url.to_file_path() { - Ok(p) => p, - Err(()) => panic!("path sources cannot be remote"), - }; - Ok(Box::new(PathSource::new(&path, self, config))) - } - SourceKind::Registry => Ok(Box::new(RegistrySource::remote( - self, - yanked_whitelist, - config, - ))), - SourceKind::LocalRegistry => { - let path = match self.inner.url.to_file_path() { - Ok(p) => p, - Err(()) => panic!("path sources cannot be remote"), - }; - Ok(Box::new(RegistrySource::local( - self, - &path, - yanked_whitelist, - config, - ))) - } - SourceKind::Directory => { - let path = match self.inner.url.to_file_path() { - Ok(p) => p, - Err(()) => panic!("path sources cannot be remote"), - }; - Ok(Box::new(DirectorySource::new(&path, self, config))) - } - } - } - - /// Gets the value of the precise field. - pub fn precise(self) -> Option<&'static str> { - self.inner.precise.as_deref() - } - - /// Gets the Git reference if this is a git source, otherwise `None`. - pub fn git_reference(self) -> Option<&'static GitReference> { - match self.inner.kind { - SourceKind::Git(ref s) => Some(s), - _ => None, - } - } - - /// Creates a new `SourceId` from this source with the given `precise`. - pub fn with_precise(self, v: Option) -> SourceId { - SourceId::wrap(SourceIdInner { - precise: v, - ..(*self.inner).clone() - }) - } - - /// Returns `true` if the remote registry is the standard . - pub fn is_default_registry(self) -> bool { - match self.inner.kind { - SourceKind::Registry => {} - _ => return false, - } - self.inner.url.as_str() == CRATES_IO_INDEX - } - - /// Hashes `self`. - /// - /// For paths, remove the workspace prefix so the same source will give the - /// same hash in different locations. - pub fn stable_hash(self, workspace: &Path, into: &mut S) { - if self.is_path() { - if let Ok(p) = self - .inner - .url - .to_file_path() - .unwrap() - .strip_prefix(workspace) - { - self.inner.kind.hash(into); - p.to_str().unwrap().hash(into); - return; - } - } - self.hash(into) - } - - pub fn full_eq(self, other: SourceId) -> bool { - ptr::eq(self.inner, other.inner) - } - - pub fn full_hash(self, into: &mut S) { - ptr::NonNull::from(self.inner).hash(into) - } -} - -impl PartialEq for SourceId { - fn eq(&self, other: &SourceId) -> bool { - self.cmp(other) == Ordering::Equal - } -} - -impl PartialOrd for SourceId { - fn partial_cmp(&self, other: &SourceId) -> Option { - Some(self.cmp(other)) - } -} - -// Custom comparison defined as canonical URL equality for git sources and URL -// equality for other sources, ignoring the `precise` and `name` fields. -impl Ord for SourceId { - fn cmp(&self, other: &SourceId) -> Ordering { - // If our interior pointers are to the exact same `SourceIdInner` then - // we're guaranteed to be equal. - if ptr::eq(self.inner, other.inner) { - return Ordering::Equal; - } - - // Sort first based on `kind`, deferring to the URL comparison below if - // the kinds are equal. - match self.inner.kind.cmp(&other.inner.kind) { - Ordering::Equal => {} - other => return other, - } - - // If the `kind` and the `url` are equal, then for git sources we also - // ensure that the canonical urls are equal. 
- match (&self.inner.kind, &other.inner.kind) { - (SourceKind::Git(_), SourceKind::Git(_)) => { - self.inner.canonical_url.cmp(&other.inner.canonical_url) - } - _ => self.inner.url.cmp(&other.inner.url), - } - } -} - -impl ser::Serialize for SourceId { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - if self.is_path() { - None::.serialize(s) - } else { - s.collect_str(&self.as_url()) - } - } -} - -impl<'de> de::Deserialize<'de> for SourceId { - fn deserialize(d: D) -> Result - where - D: de::Deserializer<'de>, - { - let string = String::deserialize(d)?; - SourceId::from_url(&string).map_err(de::Error::custom) - } -} - -fn url_display(url: &Url) -> String { - if url.scheme() == "file" { - if let Ok(path) = url.to_file_path() { - if let Some(path_str) = path.to_str() { - return path_str.to_string(); - } - } - } - - url.as_str().to_string() -} - -impl fmt::Display for SourceId { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - match self.inner.kind { - SourceKind::Git(ref reference) => { - // Don't replace the URL display for git references, - // because those are kind of expected to be URLs. - write!(f, "{}", self.inner.url)?; - if let Some(pretty) = reference.pretty_ref() { - write!(f, "?{}", pretty)?; - } - - if let Some(ref s) = self.inner.precise { - let len = cmp::min(s.len(), 8); - write!(f, "#{}", &s[..len])?; - } - Ok(()) - } - SourceKind::Path => write!(f, "{}", url_display(&self.inner.url)), - SourceKind::Registry => write!(f, "registry `{}`", self.display_registry_name()), - SourceKind::LocalRegistry => write!(f, "registry `{}`", url_display(&self.inner.url)), - SourceKind::Directory => write!(f, "dir {}", url_display(&self.inner.url)), - } - } -} - -// The hash of SourceId is used in the name of some Cargo folders, so shouldn't -// vary. `as_str` gives the serialisation of a url (which has a spec) and so -// insulates against possible changes in how the url crate does hashing. -impl Hash for SourceId { - fn hash(&self, into: &mut S) { - self.inner.kind.hash(into); - match self.inner.kind { - SourceKind::Git(_) => self.inner.canonical_url.hash(into), - _ => self.inner.url.as_str().hash(into), - } - } -} - -impl Hash for SourceIdInner { - /// The hash of `SourceIdInner` is used to retrieve its interned value. We - /// only care about fields that make `SourceIdInner` unique, which are: - /// - /// - `kind` - /// - `precise` - /// - `canonical_url` - fn hash(&self, into: &mut S) { - self.kind.hash(into); - self.precise.hash(into); - self.canonical_url.hash(into); - } -} - -impl PartialEq for SourceIdInner { - /// This implementation must be synced with [`SourceIdInner::hash`]. - fn eq(&self, other: &Self) -> bool { - self.kind == other.kind - && self.precise == other.precise - && self.canonical_url == other.canonical_url - } -} - -// forward to `Ord` -impl PartialOrd for SourceKind { - fn partial_cmp(&self, other: &SourceKind) -> Option { - Some(self.cmp(other)) - } -} - -// Note that this is specifically not derived on `SourceKind` although the -// implementation here is very similar to what it might look like if it were -// otherwise derived. -// -// The reason for this is somewhat obtuse. First of all the hash value of -// `SourceKind` makes its way into `~/.cargo/registry/index/github.com-XXXX` -// which means that changes to the hash means that all Rust users need to -// redownload the crates.io index and all their crates. If possible we strive to -// not change this to make this redownloading behavior happen as little as -// possible. 
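The serde impls above use the "deserialize as a String, then parse" idiom. A small sketch of that idiom, assuming `serde` and `serde_json` as dependencies; `SourceRef` is an illustrative type, not cargo's.

use serde::de::{self, Deserialize, Deserializer};

#[derive(Debug)]
struct SourceRef { kind: String, url: String }

impl SourceRef {
    // Parse the on-disk form, e.g. "registry+https://github.com/rust-lang/crates.io-index".
    fn from_str(s: &str) -> Result<Self, String> {
        let (kind, url) = s.split_once('+').ok_or_else(|| format!("invalid source `{s}`"))?;
        Ok(SourceRef { kind: kind.to_string(), url: url.to_string() })
    }
}

impl<'de> Deserialize<'de> for SourceRef {
    fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
        // Read a plain string, then reuse the normal parser, surfacing
        // parse failures as deserialization errors.
        let s = String::deserialize(d)?;
        SourceRef::from_str(&s).map_err(de::Error::custom)
    }
}

fn main() {
    let v: SourceRef =
        serde_json::from_str("\"registry+https://github.com/rust-lang/crates.io-index\"").unwrap();
    assert_eq!(v.kind, "registry");
    assert_eq!(v.url, "https://github.com/rust-lang/crates.io-index");
}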
How is this connected to `Ord` you might ask? That's a good -// question! -// -// Since the beginning of time `SourceKind` has had `#[derive(Hash)]`. It for -// the longest time *also* derived the `Ord` and `PartialOrd` traits. In #8522, -// however, the implementation of `Ord` changed. This handwritten implementation -// forgot to sync itself with the originally derived implementation, namely -// placing git dependencies as sorted after all other dependencies instead of -// first as before. -// -// This regression in #8522 (Rust 1.47) went unnoticed. When we switched back -// to a derived implementation in #9133 (Rust 1.52 beta) we only then ironically -// saw an issue (#9334). In #9334 it was observed that stable Rust at the time -// (1.51) was sorting git dependencies last, whereas Rust 1.52 beta would sort -// git dependencies first. This is because the `PartialOrd` implementation in -// 1.51 used #8522, the buggy implementation, which put git deps last. In 1.52 -// it was (unknowingly) restored to the pre-1.47 behavior with git dependencies -// first. -// -// Because the breakage was only witnessed after the original breakage, this -// trait implementation is preserving the "broken" behavior. Put a different way: -// -// * Rust pre-1.47 sorted git deps first. -// * Rust 1.47 to Rust 1.51 sorted git deps last, a breaking change (#8522) that -// was never noticed. -// * Rust 1.52 restored the pre-1.47 behavior (#9133, without knowing it did -// so), and breakage was witnessed by actual users due to difference with -// 1.51. -// * Rust 1.52 (the source as it lives now) was fixed to match the 1.47-1.51 -// behavior (#9383), which is now considered intentionally breaking from the -// pre-1.47 behavior. -// -// Note that this was all discovered when Rust 1.53 was in nightly and 1.52 was -// in beta. #9133 was in both beta and nightly at the time of discovery. For -// 1.52 #9383 reverted #9133, meaning 1.52 is the same as 1.51. On nightly -// (1.53) #9397 was created to fix the regression introduced by #9133 relative -// to the current stable (1.51). -// -// That's all a long winded way of saying "it's wierd that git deps hash first -// and are sorted last, but it's the way it is right now". The author of this -// comment chose to handwrite the `Ord` implementation instead of the `Hash` -// implementation, but it's only required that at most one of them is -// hand-written because the other can be derived. Perhaps one day in -// the future someone can figure out how to remove this behavior. -impl Ord for SourceKind { - fn cmp(&self, other: &SourceKind) -> Ordering { - match (self, other) { - (SourceKind::Path, SourceKind::Path) => Ordering::Equal, - (SourceKind::Path, _) => Ordering::Less, - (_, SourceKind::Path) => Ordering::Greater, - - (SourceKind::Registry, SourceKind::Registry) => Ordering::Equal, - (SourceKind::Registry, _) => Ordering::Less, - (_, SourceKind::Registry) => Ordering::Greater, - - (SourceKind::LocalRegistry, SourceKind::LocalRegistry) => Ordering::Equal, - (SourceKind::LocalRegistry, _) => Ordering::Less, - (_, SourceKind::LocalRegistry) => Ordering::Greater, - - (SourceKind::Directory, SourceKind::Directory) => Ordering::Equal, - (SourceKind::Directory, _) => Ordering::Less, - (_, SourceKind::Directory) => Ordering::Greater, - - (SourceKind::Git(a), SourceKind::Git(b)) => a.cmp(b), - } - } -} - -// This is a test that the hash of the `SourceId` for crates.io is a well-known -// value. 
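To make the trade-off above concrete: the hash can stay derived (and therefore stable for the on-disk directory names) while the ordering is written by hand so git sources keep sorting last. A toy illustration of that split, using a rank helper rather than cargo's explicit match arms.

use std::cmp::Ordering;

// `Hash` is derived, so it depends only on declaration order and stays
// stable; `Ord` is hand-written so the observed sort order can differ.
#[derive(Debug, PartialEq, Eq, Hash)]
enum Kind { Git, Path, Registry }

impl Ord for Kind {
    fn cmp(&self, other: &Self) -> Ordering {
        fn rank(k: &Kind) -> u8 {
            match k {
                Kind::Path => 0,
                Kind::Registry => 1,
                Kind::Git => 2, // sorted last despite being declared first
            }
        }
        rank(self).cmp(&rank(other))
    }
}

impl PartialOrd for Kind {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let mut kinds = vec![Kind::Git, Kind::Registry, Kind::Path];
    kinds.sort();
    assert_eq!(kinds, [Kind::Path, Kind::Registry, Kind::Git]);
}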
-// -// Note that the hash value matches what the crates.io source id has hashed -// since long before Rust 1.30. We strive to keep this value the same across -// versions of Cargo because changing it means that users will need to -// redownload the index and all crates they use when using a new Cargo version. -// -// This isn't to say that this hash can *never* change, only that when changing -// this it should be explicitly done. If this hash changes accidentally and -// you're able to restore the hash to its original value, please do so! -// Otherwise please just leave a comment in your PR as to why the hash value is -// changing and why the old value can't be easily preserved. -// -// The hash value depends on endianness and bit-width, so we only run this test on -// little-endian 64-bit CPUs (such as x86-64 and ARM64) where it matches the -// well-known value. -#[test] -#[cfg(all(target_endian = "little", target_pointer_width = "64"))] -fn test_cratesio_hash() { - let config = Config::default().unwrap(); - let crates_io = SourceId::crates_io(&config).unwrap(); - assert_eq!(crate::util::hex::short_hash(&crates_io), "1ecc6299db9ec823"); -} - -/// A `Display`able view into a `SourceId` that will write it as a url -pub struct SourceIdAsUrl<'a> { - inner: &'a SourceIdInner, -} - -impl<'a> fmt::Display for SourceIdAsUrl<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self.inner { - SourceIdInner { - kind: SourceKind::Path, - ref url, - .. - } => write!(f, "path+{}", url), - SourceIdInner { - kind: SourceKind::Git(ref reference), - ref url, - ref precise, - .. - } => { - write!(f, "git+{}", url)?; - if let Some(pretty) = reference.pretty_ref() { - write!(f, "?{}", pretty)?; - } - if let Some(precise) = precise.as_ref() { - write!(f, "#{}", precise)?; - } - Ok(()) - } - SourceIdInner { - kind: SourceKind::Registry, - ref url, - .. - } => write!(f, "registry+{}", url), - SourceIdInner { - kind: SourceKind::LocalRegistry, - ref url, - .. - } => write!(f, "local-registry+{}", url), - SourceIdInner { - kind: SourceKind::Directory, - ref url, - .. 
- } => write!(f, "directory+{}", url), - } - } -} - -impl GitReference { - /// Returns a `Display`able view of this git reference, or None if using - /// the head of the default branch - pub fn pretty_ref(&self) -> Option> { - match self { - GitReference::DefaultBranch => None, - _ => Some(PrettyRef { inner: self }), - } - } -} - -/// A git reference that can be `Display`ed -pub struct PrettyRef<'a> { - inner: &'a GitReference, -} - -impl<'a> fmt::Display for PrettyRef<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self.inner { - GitReference::Branch(ref b) => write!(f, "branch={}", b), - GitReference::Tag(ref s) => write!(f, "tag={}", s), - GitReference::Rev(ref s) => write!(f, "rev={}", s), - GitReference::DefaultBranch => unreachable!(), - } - } -} - -#[cfg(test)] -mod tests { - use super::{GitReference, SourceId, SourceKind}; - use crate::util::IntoUrl; - - #[test] - fn github_sources_equal() { - let loc = "https://github.com/foo/bar".into_url().unwrap(); - let default = SourceKind::Git(GitReference::DefaultBranch); - let s1 = SourceId::new(default.clone(), loc, None).unwrap(); - - let loc = "git://github.com/foo/bar".into_url().unwrap(); - let s2 = SourceId::new(default, loc.clone(), None).unwrap(); - - assert_eq!(s1, s2); - - let foo = SourceKind::Git(GitReference::Branch("foo".to_string())); - let s3 = SourceId::new(foo, loc, None).unwrap(); - assert_ne!(s1, s3); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/summary.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/summary.rs deleted file mode 100644 index 4f48fafa6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/summary.rs +++ /dev/null @@ -1,480 +0,0 @@ -use crate::core::{Dependency, PackageId, SourceId}; -use crate::util::interning::InternedString; -use crate::util::{CargoResult, Config}; -use anyhow::bail; -use semver::Version; -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::mem; -use std::rc::Rc; - -/// Subset of a `Manifest`. Contains only the most important information about -/// a package. -/// -/// Summaries are cloned, and should not be mutated after creation -#[derive(Debug, Clone)] -pub struct Summary { - inner: Rc, -} - -#[derive(Debug, Clone)] -struct Inner { - package_id: PackageId, - dependencies: Vec, - features: Rc, - has_namespaced_features: bool, - has_overlapping_features: Option, - checksum: Option, - links: Option, -} - -impl Summary { - pub fn new( - config: &Config, - pkg_id: PackageId, - dependencies: Vec, - features: &BTreeMap>, - links: Option>, - ) -> CargoResult { - // ****CAUTION**** If you change anything here than may raise a new - // error, be sure to coordinate that change with either the index - // schema field or the SummariesCache version. 
- let mut has_overlapping_features = None; - for dep in dependencies.iter() { - let dep_name = dep.name_in_toml(); - if features.contains_key(&dep_name) { - has_overlapping_features = Some(dep_name); - } - if dep.is_optional() && !dep.is_transitive() { - bail!( - "dev-dependencies are not allowed to be optional: `{}`", - dep_name - ) - } - } - let (feature_map, has_namespaced_features) = - build_feature_map(config, pkg_id, features, &dependencies)?; - Ok(Summary { - inner: Rc::new(Inner { - package_id: pkg_id, - dependencies, - features: Rc::new(feature_map), - checksum: None, - links: links.map(|l| l.into()), - has_namespaced_features, - has_overlapping_features, - }), - }) - } - - pub fn package_id(&self) -> PackageId { - self.inner.package_id - } - pub fn name(&self) -> InternedString { - self.package_id().name() - } - pub fn version(&self) -> &Version { - self.package_id().version() - } - pub fn source_id(&self) -> SourceId { - self.package_id().source_id() - } - pub fn dependencies(&self) -> &[Dependency] { - &self.inner.dependencies - } - pub fn features(&self) -> &FeatureMap { - &self.inner.features - } - - /// Returns an error if this Summary is using an unstable feature that is - /// not enabled. - pub fn unstable_gate( - &self, - namespaced_features: bool, - weak_dep_features: bool, - ) -> CargoResult<()> { - if !namespaced_features { - if self.inner.has_namespaced_features { - bail!( - "namespaced features with the `dep:` prefix are only allowed on \ - the nightly channel and requires the `-Z namespaced-features` flag on the command-line" - ); - } - if let Some(dep_name) = self.inner.has_overlapping_features { - bail!( - "features and dependencies cannot have the same name: `{}`", - dep_name - ) - } - } - if !weak_dep_features { - for (feat_name, features) in self.features() { - for fv in features { - if matches!(fv, FeatureValue::DepFeature { weak: true, .. }) { - bail!( - "optional dependency features with `?` syntax are only \ - allowed on the nightly channel and requires the \ - `-Z weak-dep-features` flag on the command line\n\ - Feature `{}` had feature value `{}`.", - feat_name, - fv - ); - } - } - } - } - Ok(()) - } - - pub fn checksum(&self) -> Option<&str> { - self.inner.checksum.as_deref() - } - pub fn links(&self) -> Option { - self.inner.links - } - - pub fn override_id(mut self, id: PackageId) -> Summary { - Rc::make_mut(&mut self.inner).package_id = id; - self - } - - pub fn set_checksum(&mut self, cksum: String) { - Rc::make_mut(&mut self.inner).checksum = Some(cksum); - } - - pub fn map_dependencies(mut self, f: F) -> Summary - where - F: FnMut(Dependency) -> Dependency, - { - { - let slot = &mut Rc::make_mut(&mut self.inner).dependencies; - *slot = mem::take(slot).into_iter().map(f).collect(); - } - self - } - - pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Summary { - let me = if self.package_id().source_id() == to_replace { - let new_id = self.package_id().with_source_id(replace_with); - self.override_id(new_id) - } else { - self - }; - me.map_dependencies(|dep| dep.map_source(to_replace, replace_with)) - } -} - -impl PartialEq for Summary { - fn eq(&self, other: &Summary) -> bool { - self.inner.package_id == other.inner.package_id - } -} - -impl Eq for Summary {} - -impl Hash for Summary { - fn hash(&self, state: &mut H) { - self.inner.package_id.hash(state); - } -} - -/// Checks features for errors, bailing out a CargoResult:Err if invalid, -/// and creates FeatureValues for each feature. 
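`Summary` above keeps its data behind an `Rc` so clones are cheap, and mutators go through `Rc::make_mut` for copy-on-write. A minimal sketch of that layout; `Meta` and `Inner` are illustrative names.

use std::rc::Rc;

#[derive(Clone)]
struct Inner { name: String, checksum: Option<String> }

#[derive(Clone)]
struct Meta { inner: Rc<Inner> }

impl Meta {
    fn set_checksum(&mut self, cksum: String) {
        // Clones the inner data only if someone else still holds a reference.
        Rc::make_mut(&mut self.inner).checksum = Some(cksum);
    }
}

fn main() {
    let a = Meta { inner: Rc::new(Inner { name: "demo".into(), checksum: None }) };
    let mut b = a.clone();           // cheap: just bumps the refcount
    b.set_checksum("abc123".into()); // copy-on-write: `a` is unaffected
    assert_eq!(a.inner.name, "demo");
    assert!(a.inner.checksum.is_none());
    assert_eq!(b.inner.checksum.as_deref(), Some("abc123"));
}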
-/// -/// The returned `bool` indicates whether or not the `[features]` table -/// included a `dep:` prefixed namespaced feature (used for gating on -/// nightly). -fn build_feature_map( - config: &Config, - pkg_id: PackageId, - features: &BTreeMap>, - dependencies: &[Dependency], -) -> CargoResult<(FeatureMap, bool)> { - use self::FeatureValue::*; - let mut dep_map = HashMap::new(); - for dep in dependencies.iter() { - dep_map - .entry(dep.name_in_toml()) - .or_insert_with(Vec::new) - .push(dep); - } - - let mut map: FeatureMap = features - .iter() - .map(|(feature, list)| { - let fvs: Vec<_> = list - .iter() - .map(|feat_value| FeatureValue::new(*feat_value)) - .collect(); - (*feature, fvs) - }) - .collect(); - let has_namespaced_features = map.values().flatten().any(|fv| fv.has_dep_prefix()); - - // Add implicit features for optional dependencies if they weren't - // explicitly listed anywhere. - let explicitly_listed: HashSet<_> = map - .values() - .flatten() - .filter_map(|fv| match fv { - Dep { dep_name } => Some(*dep_name), - _ => None, - }) - .collect(); - for dep in dependencies { - if !dep.is_optional() { - continue; - } - let dep_name_in_toml = dep.name_in_toml(); - if features.contains_key(&dep_name_in_toml) || explicitly_listed.contains(&dep_name_in_toml) - { - continue; - } - let fv = Dep { - dep_name: dep_name_in_toml, - }; - map.insert(dep_name_in_toml, vec![fv]); - } - - // Validate features are listed properly. - for (feature, fvs) in &map { - if feature.starts_with("dep:") { - bail!( - "feature named `{}` is not allowed to start with `dep:`", - feature - ); - } - if feature.contains('/') { - bail!( - "feature named `{}` is not allowed to contain slashes", - feature - ); - } - validate_feature_name(config, pkg_id, feature)?; - for fv in fvs { - // Find data for the referenced dependency... - let dep_data = { - match fv { - Feature(dep_name) | Dep { dep_name, .. } | DepFeature { dep_name, .. } => { - dep_map.get(dep_name) - } - } - }; - let is_optional_dep = dep_data - .iter() - .flat_map(|d| d.iter()) - .any(|d| d.is_optional()); - let is_any_dep = dep_data.is_some(); - match fv { - Feature(f) => { - if !features.contains_key(f) { - if !is_any_dep { - bail!( - "feature `{}` includes `{}` which is neither a dependency \ - nor another feature", - feature, - fv - ); - } - if is_optional_dep { - if !map.contains_key(f) { - bail!( - "feature `{}` includes `{}`, but `{}` is an \ - optional dependency without an implicit feature\n\ - Use `dep:{}` to enable the dependency.", - feature, - fv, - f, - f - ); - } - } else { - bail!("feature `{}` includes `{}`, but `{}` is not an optional dependency\n\ - A non-optional dependency of the same name is defined; \ - consider adding `optional = true` to its definition.", - feature, fv, f); - } - } - } - Dep { dep_name } => { - if !is_any_dep { - bail!( - "feature `{}` includes `{}`, but `{}` is not listed as a dependency", - feature, - fv, - dep_name - ); - } - if !is_optional_dep { - bail!( - "feature `{}` includes `{}`, but `{}` is not an optional dependency\n\ - A non-optional dependency of the same name is defined; \ - consider adding `optional = true` to its definition.", - feature, - fv, - dep_name - ); - } - } - DepFeature { - dep_name, - dep_feature, - weak, - .. - } => { - // Early check for some unlikely syntax. - if dep_feature.contains('/') { - bail!( - "multiple slashes in feature `{}` (included by feature `{}`) are not allowed", - fv, - feature - ); - } - // Validation of the feature name will be performed in the resolver. 
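The first step of `build_feature_map` above indexes dependencies by their in-`Cargo.toml` name, allowing several entries per name (e.g. target-specific duplicates), so later validation can look them up quickly. A small std-only sketch of that indexing; `Dep` is an illustrative stand-in.

use std::collections::HashMap;

#[derive(Debug)]
struct Dep { name: &'static str, optional: bool }

fn main() {
    let deps = [
        Dep { name: "serde", optional: true },
        Dep { name: "libc", optional: false },
        Dep { name: "libc", optional: false }, // e.g. a target-specific entry
    ];

    // Several dependencies may share a name, hence a Vec per key.
    let mut dep_map: HashMap<&str, Vec<&Dep>> = HashMap::new();
    for dep in &deps {
        dep_map.entry(dep.name).or_insert_with(Vec::new).push(dep);
    }

    assert_eq!(dep_map["libc"].len(), 2);
    // An optional dependency gets an implicit feature of the same name
    // unless a `[features]` entry already mentions it.
    assert!(dep_map["serde"].iter().any(|d| d.optional));
}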
- if !is_any_dep { - bail!( - "feature `{}` includes `{}`, but `{}` is not a dependency", - feature, - fv, - dep_name - ); - } - if *weak && !is_optional_dep { - bail!("feature `{}` includes `{}` with a `?`, but `{}` is not an optional dependency\n\ - A non-optional dependency of the same name is defined; \ - consider removing the `?` or changing the dependency to be optional", - feature, fv, dep_name); - } - } - } - } - } - - // Make sure every optional dep is mentioned at least once. - let used: HashSet<_> = map - .values() - .flatten() - .filter_map(|fv| match fv { - Dep { dep_name } | DepFeature { dep_name, .. } => Some(dep_name), - _ => None, - }) - .collect(); - if let Some(dep) = dependencies - .iter() - .find(|dep| dep.is_optional() && !used.contains(&dep.name_in_toml())) - { - bail!( - "optional dependency `{}` is not included in any feature\n\ - Make sure that `dep:{}` is included in one of features in the [features] table.", - dep.name_in_toml(), - dep.name_in_toml(), - ); - } - - Ok((map, has_namespaced_features)) -} - -/// FeatureValue represents the types of dependencies a feature can have. -#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] -pub enum FeatureValue { - /// A feature enabling another feature. - Feature(InternedString), - /// A feature enabling a dependency with `dep:dep_name` syntax. - Dep { dep_name: InternedString }, - /// A feature enabling a feature on a dependency with `crate_name/feat_name` syntax. - DepFeature { - dep_name: InternedString, - dep_feature: InternedString, - /// If `true`, indicates the `?` syntax is used, which means this will - /// not automatically enable the dependency unless the dependency is - /// activated through some other means. - weak: bool, - }, -} - -impl FeatureValue { - pub fn new(feature: InternedString) -> FeatureValue { - match feature.find('/') { - Some(pos) => { - let (dep, dep_feat) = feature.split_at(pos); - let dep_feat = &dep_feat[1..]; - let (dep, weak) = if let Some(dep) = dep.strip_suffix('?') { - (dep, true) - } else { - (dep, false) - }; - FeatureValue::DepFeature { - dep_name: InternedString::new(dep), - dep_feature: InternedString::new(dep_feat), - weak, - } - } - None => { - if let Some(dep_name) = feature.strip_prefix("dep:") { - FeatureValue::Dep { - dep_name: InternedString::new(dep_name), - } - } else { - FeatureValue::Feature(feature) - } - } - } - } - - /// Returns `true` if this feature explicitly used `dep:` syntax. - pub fn has_dep_prefix(&self) -> bool { - matches!(self, FeatureValue::Dep { .. }) - } -} - -impl fmt::Display for FeatureValue { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - use self::FeatureValue::*; - match self { - Feature(feat) => write!(f, "{}", feat), - Dep { dep_name } => write!(f, "dep:{}", dep_name), - DepFeature { - dep_name, - dep_feature, - weak, - } => { - let weak = if *weak { "?" 
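`FeatureValue::new` above classifies a feature entry as a plain feature, a `dep:` dependency, or a `dep/feat` (optionally weak `dep?/feat`) reference. A std-only sketch of that classification; the enum mirrors the shape of the original but is a simplified stand-in using `String` instead of interned strings.

#[derive(Debug, PartialEq)]
enum FeatureValue {
    Feature(String),
    Dep { dep_name: String },
    DepFeature { dep_name: String, dep_feature: String, weak: bool },
}

fn parse(feature: &str) -> FeatureValue {
    if let Some((dep, feat)) = feature.split_once('/') {
        // A trailing `?` on the dependency part marks a weak reference.
        let (dep, weak) = match dep.strip_suffix('?') {
            Some(dep) => (dep, true),
            None => (dep, false),
        };
        FeatureValue::DepFeature {
            dep_name: dep.to_string(),
            dep_feature: feat.to_string(),
            weak,
        }
    } else if let Some(dep_name) = feature.strip_prefix("dep:") {
        FeatureValue::Dep { dep_name: dep_name.to_string() }
    } else {
        FeatureValue::Feature(feature.to_string())
    }
}

fn main() {
    assert_eq!(parse("std"), FeatureValue::Feature("std".into()));
    assert_eq!(parse("dep:serde"), FeatureValue::Dep { dep_name: "serde".into() });
    assert_eq!(
        parse("serde?/derive"),
        FeatureValue::DepFeature { dep_name: "serde".into(), dep_feature: "derive".into(), weak: true }
    );
}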
} else { "" }; - write!(f, "{}{}/{}", dep_name, weak, dep_feature) - } - } - } -} - -pub type FeatureMap = BTreeMap>; - -fn validate_feature_name(config: &Config, pkg_id: PackageId, name: &str) -> CargoResult<()> { - let mut chars = name.chars(); - const FUTURE: &str = "This was previously accepted but is being phased out; \ - it will become a hard error in a future release.\n\ - For more information, see issue #8813 , \ - and please leave a comment if this will be a problem for your project."; - if let Some(ch) = chars.next() { - if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_' || ch.is_digit(10)) { - config.shell().warn(&format!( - "invalid character `{}` in feature `{}` in package {}, \ - the first character must be a Unicode XID start character or digit \ - (most letters or `_` or `0` to `9`)\n\ - {}", - ch, name, pkg_id, FUTURE - ))?; - } - } - for ch in chars { - if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-' || ch == '+' || ch == '.') { - config.shell().warn(&format!( - "invalid character `{}` in feature `{}` in package {}, \ - characters must be Unicode XID characters, `+`, or `.` \ - (numbers, `+`, `-`, `_`, `.`, or most letters)\n\ - {}", - ch, name, pkg_id, FUTURE - ))?; - } - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/workspace.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/workspace.rs deleted file mode 100644 index a5b1de064..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/core/workspace.rs +++ /dev/null @@ -1,1642 +0,0 @@ -use std::cell::RefCell; -use std::collections::hash_map::{Entry, HashMap}; -use std::collections::{BTreeMap, BTreeSet, HashSet}; -use std::path::{Path, PathBuf}; -use std::rc::Rc; - -use anyhow::{bail, Context as _}; -use glob::glob; -use itertools::Itertools; -use log::debug; -use url::Url; - -use crate::core::features::Features; -use crate::core::registry::PackageRegistry; -use crate::core::resolver::features::CliFeatures; -use crate::core::resolver::ResolveBehavior; -use crate::core::{Dependency, Edition, FeatureValue, PackageId, PackageIdSpec}; -use crate::core::{EitherManifest, Package, SourceId, VirtualManifest}; -use crate::ops; -use crate::sources::{PathSource, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; -use crate::util::errors::{CargoResult, ManifestError}; -use crate::util::interning::InternedString; -use crate::util::lev_distance; -use crate::util::toml::{read_manifest, TomlDependency, TomlProfiles}; -use crate::util::{config::ConfigRelativePath, Config, Filesystem, IntoUrl}; -use cargo_util::paths; - -/// The core abstraction in Cargo for working with a workspace of crates. -/// -/// A workspace is often created very early on and then threaded through all -/// other functions. It's typically through this object that the current -/// package is loaded and/or learned about. -#[derive(Debug)] -pub struct Workspace<'cfg> { - config: &'cfg Config, - - // This path is a path to where the current cargo subcommand was invoked - // from. That is the `--manifest-path` argument to Cargo, and - // points to the "main crate" that we're going to worry about. - current_manifest: PathBuf, - - // A list of packages found in this workspace. Always includes at least the - // package mentioned by `current_manifest`. - packages: Packages<'cfg>, - - // If this workspace includes more than one crate, this points to the root - // of the workspace. 
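`validate_feature_name` above only warns, and uses the `unicode-xid` crate for its character classes. A rough std-only approximation that returns an error instead and substitutes `char::is_alphanumeric` for the XID checks.

fn validate_feature_name(name: &str) -> Result<(), String> {
    let mut chars = name.chars();
    // First character: roughly "XID start, `_`, or digit" in the original.
    match chars.next() {
        Some(ch) if ch.is_alphanumeric() || ch == '_' => {}
        Some(ch) => return Err(format!("invalid first character `{ch}` in feature `{name}`")),
        None => return Err("feature name is empty".to_string()),
    }
    // Remaining characters: roughly "XID continue, `-`, `+`, or `.`".
    for ch in chars {
        if !(ch.is_alphanumeric() || matches!(ch, '-' | '+' | '.' | '_')) {
            return Err(format!("invalid character `{ch}` in feature `{name}`"));
        }
    }
    Ok(())
}

fn main() {
    assert!(validate_feature_name("serde_derive").is_ok());
    assert!(validate_feature_name("no/slashes").is_err());
}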
This is `None` in the case that `[workspace]` is - // missing, `package.workspace` is missing, and no `Cargo.toml` above - // `current_manifest` was found on the filesystem with `[workspace]`. - root_manifest: Option, - - // Shared target directory for all the packages of this workspace. - // `None` if the default path of `root/target` should be used. - target_dir: Option, - - // List of members in this workspace with a listing of all their manifest - // paths. The packages themselves can be looked up through the `packages` - // set above. - members: Vec, - member_ids: HashSet, - - // The subset of `members` that are used by the - // `build`, `check`, `test`, and `bench` subcommands - // when no package is selected with `--package` / `-p` and `--workspace` - // is not used. - // - // This is set by the `default-members` config - // in the `[workspace]` section. - // When unset, this is the same as `members` for virtual workspaces - // (`--workspace` is implied) - // or only the root package for non-virtual workspaces. - default_members: Vec, - - // `true` if this is a temporary workspace created for the purposes of the - // `cargo install` or `cargo package` commands. - is_ephemeral: bool, - - // `true` if this workspace should enforce optional dependencies even when - // not needed; false if this workspace should only enforce dependencies - // needed by the current configuration (such as in cargo install). In some - // cases `false` also results in the non-enforcement of dev-dependencies. - require_optional_deps: bool, - - // A cache of loaded packages for particular paths which is disjoint from - // `packages` up above, used in the `load` method down below. - loaded_packages: RefCell>, - - // If `true`, then the resolver will ignore any existing `Cargo.lock` - // file. This is set for `cargo install` without `--locked`. - ignore_lock: bool, - - /// The resolver behavior specified with the `resolver` field. - resolve_behavior: ResolveBehavior, - - /// Workspace-level custom metadata - custom_metadata: Option, -} - -// Separate structure for tracking loaded packages (to avoid loading anything -// twice), and this is separate to help appease the borrow checker. -#[derive(Debug)] -struct Packages<'cfg> { - config: &'cfg Config, - packages: HashMap, -} - -#[derive(Debug)] -pub enum MaybePackage { - Package(Package), - Virtual(VirtualManifest), -} - -/// Configuration of a workspace in a manifest. -#[derive(Debug, Clone)] -pub enum WorkspaceConfig { - /// Indicates that `[workspace]` was present and the members were - /// optionally specified as well. - Root(WorkspaceRootConfig), - - /// Indicates that `[workspace]` was present and the `root` field is the - /// optional value of `package.workspace`, if present. - Member { root: Option }, -} - -/// Intermediate configuration of a workspace root in a manifest. -/// -/// Knows the Workspace Root path, as well as `members` and `exclude` lists of path patterns, which -/// together tell if some path is recognized as a member by this root or not. -#[derive(Debug, Clone)] -pub struct WorkspaceRootConfig { - root_dir: PathBuf, - members: Option>, - default_members: Option>, - exclude: Vec, - custom_metadata: Option, -} - -impl<'cfg> Workspace<'cfg> { - /// Creates a new workspace given the target manifest pointed to by - /// `manifest_path`. - /// - /// This function will construct the entire workspace by determining the - /// root and all member packages. 
It will then validate the workspace - /// before returning it, so `Ok` is only returned for valid workspaces. - pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult> { - let mut ws = Workspace::new_default(manifest_path.to_path_buf(), config); - ws.target_dir = config.target_dir()?; - - if manifest_path.is_relative() { - bail!( - "manifest_path:{:?} is not an absolute path. Please provide an absolute path.", - manifest_path - ) - } else { - ws.root_manifest = ws.find_root(manifest_path)?; - } - - ws.custom_metadata = ws - .load_workspace_config()? - .and_then(|cfg| cfg.custom_metadata); - ws.find_members()?; - ws.set_resolve_behavior(); - ws.validate()?; - Ok(ws) - } - - fn new_default(current_manifest: PathBuf, config: &'cfg Config) -> Workspace<'cfg> { - Workspace { - config, - current_manifest, - packages: Packages { - config, - packages: HashMap::new(), - }, - root_manifest: None, - target_dir: None, - members: Vec::new(), - member_ids: HashSet::new(), - default_members: Vec::new(), - is_ephemeral: false, - require_optional_deps: true, - loaded_packages: RefCell::new(HashMap::new()), - ignore_lock: false, - resolve_behavior: ResolveBehavior::V1, - custom_metadata: None, - } - } - - pub fn new_virtual( - root_path: PathBuf, - current_manifest: PathBuf, - manifest: VirtualManifest, - config: &'cfg Config, - ) -> CargoResult> { - let mut ws = Workspace::new_default(current_manifest, config); - ws.root_manifest = Some(root_path.join("Cargo.toml")); - ws.target_dir = config.target_dir()?; - ws.packages - .packages - .insert(root_path, MaybePackage::Virtual(manifest)); - ws.find_members()?; - ws.set_resolve_behavior(); - // TODO: validation does not work because it walks up the directory - // tree looking for the root which is a fake file that doesn't exist. - Ok(ws) - } - - /// Creates a "temporary workspace" from one package which only contains - /// that package. - /// - /// This constructor will not touch the filesystem and only creates an - /// in-memory workspace. That is, all configuration is ignored, it's just - /// intended for that one package. - /// - /// This is currently only used in niche situations like `cargo install` or - /// `cargo package`. - pub fn ephemeral( - package: Package, - config: &'cfg Config, - target_dir: Option, - require_optional_deps: bool, - ) -> CargoResult> { - let mut ws = Workspace::new_default(package.manifest_path().to_path_buf(), config); - ws.is_ephemeral = true; - ws.require_optional_deps = require_optional_deps; - let key = ws.current_manifest.parent().unwrap(); - let id = package.package_id(); - let package = MaybePackage::Package(package); - ws.packages.packages.insert(key.to_path_buf(), package); - ws.target_dir = if let Some(dir) = target_dir { - Some(dir) - } else { - ws.config.target_dir()? - }; - ws.members.push(ws.current_manifest.clone()); - ws.member_ids.insert(id); - ws.default_members.push(ws.current_manifest.clone()); - ws.set_resolve_behavior(); - Ok(ws) - } - - fn set_resolve_behavior(&mut self) { - // - If resolver is specified in the workspace definition, use that. - // - If the root package specifies the resolver, use that. - // - If the root package specifies edition 2021, use v2. - // - Otherwise, use the default v1. 
- self.resolve_behavior = match self.root_maybe() { - MaybePackage::Package(p) => p.manifest().resolve_behavior().or_else(|| { - if p.manifest().edition() >= Edition::Edition2021 { - Some(ResolveBehavior::V2) - } else { - None - } - }), - MaybePackage::Virtual(vm) => vm.resolve_behavior(), - } - .unwrap_or(ResolveBehavior::V1); - } - - /// Returns the current package of this workspace. - /// - /// Note that this can return an error if it the current manifest is - /// actually a "virtual Cargo.toml", in which case an error is returned - /// indicating that something else should be passed. - pub fn current(&self) -> CargoResult<&Package> { - let pkg = self.current_opt().ok_or_else(|| { - anyhow::format_err!( - "manifest path `{}` is a virtual manifest, but this \ - command requires running against an actual package in \ - this workspace", - self.current_manifest.display() - ) - })?; - Ok(pkg) - } - - pub fn current_mut(&mut self) -> CargoResult<&mut Package> { - let cm = self.current_manifest.clone(); - let pkg = self.current_opt_mut().ok_or_else(|| { - anyhow::format_err!( - "manifest path `{}` is a virtual manifest, but this \ - command requires running against an actual package in \ - this workspace", - cm.display() - ) - })?; - Ok(pkg) - } - - pub fn current_opt(&self) -> Option<&Package> { - match *self.packages.get(&self.current_manifest) { - MaybePackage::Package(ref p) => Some(p), - MaybePackage::Virtual(..) => None, - } - } - - pub fn current_opt_mut(&mut self) -> Option<&mut Package> { - match *self.packages.get_mut(&self.current_manifest) { - MaybePackage::Package(ref mut p) => Some(p), - MaybePackage::Virtual(..) => None, - } - } - - pub fn is_virtual(&self) -> bool { - match *self.packages.get(&self.current_manifest) { - MaybePackage::Package(..) => false, - MaybePackage::Virtual(..) => true, - } - } - - /// Returns the `Config` this workspace is associated with. - pub fn config(&self) -> &'cfg Config { - self.config - } - - pub fn profiles(&self) -> Option<&TomlProfiles> { - match self.root_maybe() { - MaybePackage::Package(p) => p.manifest().profiles(), - MaybePackage::Virtual(vm) => vm.profiles(), - } - } - - /// Returns the root path of this workspace. - /// - /// That is, this returns the path of the directory containing the - /// `Cargo.toml` which is the root of this workspace. - pub fn root(&self) -> &Path { - self.root_manifest().parent().unwrap() - } - - /// Returns the path of the `Cargo.toml` which is the root of this - /// workspace. - pub fn root_manifest(&self) -> &Path { - self.root_manifest - .as_ref() - .unwrap_or(&self.current_manifest) - } - - /// Returns the root Package or VirtualManifest. - pub fn root_maybe(&self) -> &MaybePackage { - self.packages.get(self.root_manifest()) - } - - pub fn target_dir(&self) -> Filesystem { - self.target_dir - .clone() - .unwrap_or_else(|| Filesystem::new(self.root().join("target"))) - } - - /// Returns the root `[replace]` section of this workspace. - /// - /// This may be from a virtual crate or an actual crate. 
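`set_resolve_behavior` above picks the resolver from an explicit setting, otherwise infers V2 from edition 2021, and otherwise falls back to V1. A compact sketch of that fallback chain with illustrative stand-in types.

#[derive(Debug, PartialEq, Clone, Copy)]
enum ResolveBehavior { V1, V2 }

#[derive(Debug, PartialEq, PartialOrd)]
enum Edition { E2018, E2021 }

struct Manifest { resolver: Option<ResolveBehavior>, edition: Edition }

fn resolve_behavior(m: &Manifest) -> ResolveBehavior {
    m.resolver
        // No explicit setting: edition 2021 implies the V2 resolver.
        .or_else(|| (m.edition >= Edition::E2021).then_some(ResolveBehavior::V2))
        // Otherwise, the default V1 resolver.
        .unwrap_or(ResolveBehavior::V1)
}

fn main() {
    let m = Manifest { resolver: None, edition: Edition::E2021 };
    assert_eq!(resolve_behavior(&m), ResolveBehavior::V2);
    let m = Manifest { resolver: None, edition: Edition::E2018 };
    assert_eq!(resolve_behavior(&m), ResolveBehavior::V1);
    let m = Manifest { resolver: Some(ResolveBehavior::V1), edition: Edition::E2021 };
    assert_eq!(resolve_behavior(&m), ResolveBehavior::V1);
}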
- pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] { - match self.root_maybe() { - MaybePackage::Package(p) => p.manifest().replace(), - MaybePackage::Virtual(vm) => vm.replace(), - } - } - - fn config_patch(&self) -> CargoResult>> { - let config_patch: Option< - BTreeMap>>, - > = self.config.get("patch")?; - - let source = SourceId::for_path(self.root())?; - - let mut warnings = Vec::new(); - let mut nested_paths = Vec::new(); - - let mut patch = HashMap::new(); - for (url, deps) in config_patch.into_iter().flatten() { - let url = match &url[..] { - CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(), - url => self - .config - .get_registry_index(url) - .or_else(|_| url.into_url()) - .with_context(|| { - format!("[patch] entry `{}` should be a URL or registry name", url) - })?, - }; - patch.insert( - url, - deps.iter() - .map(|(name, dep)| { - dep.to_dependency_split( - name, - source, - &mut nested_paths, - self.config, - &mut warnings, - /* platform */ None, - // NOTE: Since we use ConfigRelativePath, this root isn't used as - // any relative paths are resolved before they'd be joined with root. - Path::new("unused-relative-path"), - self.unstable_features(), - /* kind */ None, - ) - }) - .collect::>>()?, - ); - } - - for message in warnings { - self.config - .shell() - .warn(format!("[patch] in cargo config: {}", message))? - } - - Ok(patch) - } - - /// Returns the root `[patch]` section of this workspace. - /// - /// This may be from a virtual crate or an actual crate. - pub fn root_patch(&self) -> CargoResult>> { - let from_manifest = match self.root_maybe() { - MaybePackage::Package(p) => p.manifest().patch(), - MaybePackage::Virtual(vm) => vm.patch(), - }; - - let from_config = self.config_patch()?; - if from_config.is_empty() { - return Ok(from_manifest.clone()); - } - if from_manifest.is_empty() { - return Ok(from_config); - } - - // We could just chain from_manifest and from_config, - // but that's not quite right as it won't deal with overlaps. - let mut combined = from_config; - for (url, deps_from_manifest) in from_manifest { - if let Some(deps_from_config) = combined.get_mut(url) { - // We want from_config to take precedence for each patched name. - // NOTE: This is inefficient if the number of patches is large! - let mut from_manifest_pruned = deps_from_manifest.clone(); - for dep_from_config in &mut *deps_from_config { - if let Some(i) = from_manifest_pruned.iter().position(|dep_from_manifest| { - // XXX: should this also take into account version numbers? - dep_from_config.name_in_toml() == dep_from_manifest.name_in_toml() - }) { - from_manifest_pruned.swap_remove(i); - } - } - // Whatever is left does not exist in manifest dependencies. 
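The precedence being implemented here can be shown in miniature: for each source URL, `[patch]` entries coming from configuration win over manifest entries with the same name, and manifest-only entries are kept. A self-contained sketch with plain strings standing in for cargo's URL and dependency types:

use std::collections::HashMap;

/// Merge manifest `[patch]` entries into those from configuration.
/// For each URL, a name present in both keeps the configuration version;
/// manifest-only names are appended.
fn merge_patches(
    mut combined: HashMap<String, Vec<(String, String)>>, // url -> [(name, replacement)]
    from_manifest: &HashMap<String, Vec<(String, String)>>,
) -> HashMap<String, Vec<(String, String)>> {
    for (url, manifest_deps) in from_manifest {
        match combined.get_mut(url) {
            Some(config_deps) => {
                // Keep only the manifest entries whose name the config did not patch.
                let leftover: Vec<_> = manifest_deps
                    .iter()
                    .filter(|(name, _)| !config_deps.iter().any(|(n, _)| n == name))
                    .cloned()
                    .collect();
                config_deps.extend(leftover);
            }
            None => {
                combined.insert(url.clone(), manifest_deps.clone());
            }
        }
    }
    combined
}

fn main() {
    let config = HashMap::from([(
        "crates-io".to_string(),
        vec![("serde".to_string(), "config-fork".to_string())],
    )]);
    let manifest = HashMap::from([(
        "crates-io".to_string(),
        vec![
            ("serde".to_string(), "manifest-fork".to_string()),
            ("log".to_string(), "manifest-fork".to_string()),
        ],
    )]);
    let merged = merge_patches(config, &manifest);
    assert_eq!(merged["crates-io"].len(), 2);
    assert!(merged["crates-io"].contains(&("serde".to_string(), "config-fork".to_string())));
}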
- deps_from_config.extend(from_manifest_pruned); - } else { - combined.insert(url.clone(), deps_from_manifest.clone()); - } - } - Ok(combined) - } - - /// Returns an iterator over all packages in this workspace - pub fn members(&self) -> impl Iterator { - let packages = &self.packages; - self.members - .iter() - .filter_map(move |path| match packages.get(path) { - &MaybePackage::Package(ref p) => Some(p), - _ => None, - }) - } - - /// Returns a mutable iterator over all packages in this workspace - pub fn members_mut(&mut self) -> impl Iterator { - let packages = &mut self.packages.packages; - let members: HashSet<_> = self - .members - .iter() - .map(|path| path.parent().unwrap().to_owned()) - .collect(); - - packages.iter_mut().filter_map(move |(path, package)| { - if members.contains(path) { - if let MaybePackage::Package(ref mut p) = package { - return Some(p); - } - } - - None - }) - } - - /// Returns an iterator over default packages in this workspace - pub fn default_members<'a>(&'a self) -> impl Iterator { - let packages = &self.packages; - self.default_members - .iter() - .filter_map(move |path| match packages.get(path) { - &MaybePackage::Package(ref p) => Some(p), - _ => None, - }) - } - - /// Returns an iterator over default packages in this workspace - pub fn default_members_mut(&mut self) -> impl Iterator { - let packages = &mut self.packages.packages; - let members: HashSet<_> = self - .default_members - .iter() - .map(|path| path.parent().unwrap().to_owned()) - .collect(); - - packages.iter_mut().filter_map(move |(path, package)| { - if members.contains(path) { - if let MaybePackage::Package(ref mut p) = package { - return Some(p); - } - } - - None - }) - } - - /// Returns true if the package is a member of the workspace. - pub fn is_member(&self, pkg: &Package) -> bool { - self.member_ids.contains(&pkg.package_id()) - } - - pub fn is_ephemeral(&self) -> bool { - self.is_ephemeral - } - - pub fn require_optional_deps(&self) -> bool { - self.require_optional_deps - } - - pub fn set_require_optional_deps( - &mut self, - require_optional_deps: bool, - ) -> &mut Workspace<'cfg> { - self.require_optional_deps = require_optional_deps; - self - } - - pub fn ignore_lock(&self) -> bool { - self.ignore_lock - } - - pub fn set_ignore_lock(&mut self, ignore_lock: bool) -> &mut Workspace<'cfg> { - self.ignore_lock = ignore_lock; - self - } - - pub fn custom_metadata(&self) -> Option<&toml::Value> { - self.custom_metadata.as_ref() - } - - pub fn load_workspace_config(&mut self) -> CargoResult> { - // If we didn't find a root, it must mean there is no [workspace] section, and thus no - // metadata. - if let Some(root_path) = &self.root_manifest { - let root_package = self.packages.load(root_path)?; - match root_package.workspace_config() { - WorkspaceConfig::Root(ref root_config) => { - return Ok(Some(root_config.clone())); - } - - _ => bail!( - "root of a workspace inferred but wasn't a root: {}", - root_path.display() - ), - } - } - - Ok(None) - } - - /// Finds the root of a workspace for the crate whose manifest is located - /// at `manifest_path`. - /// - /// This will parse the `Cargo.toml` at `manifest_path` and then interpret - /// the workspace configuration, optionally walking up the filesystem - /// looking for other workspace roots. - /// - /// Returns an error if `manifest_path` isn't actually a valid manifest or - /// if some other transient error happens. 
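The search described in the doc comment above amounts to walking parent directories until a manifest that declares a workspace is found. A filesystem-only sketch of that idea under heavy simplification: it merely looks for a literal `[workspace]` line instead of parsing TOML, and it skips the exclusion and `CARGO_HOME` handling in the real implementation that follows:

use std::fs;
use std::path::{Path, PathBuf};

/// Walk from the directory above the manifest's own directory up through
/// its ancestors, returning the first `Cargo.toml` that declares `[workspace]`.
fn find_workspace_root(manifest_path: &Path) -> Option<PathBuf> {
    // Skip the manifest itself and its own directory, like the real search.
    for dir in manifest_path.ancestors().skip(2) {
        let candidate = dir.join("Cargo.toml");
        if let Ok(contents) = fs::read_to_string(&candidate) {
            if contents.lines().any(|l| l.trim() == "[workspace]") {
                return Some(candidate);
            }
        }
    }
    None
}

fn main() {
    let start = Path::new("crates/foo/Cargo.toml");
    match find_workspace_root(start) {
        Some(root) => println!("workspace root: {}", root.display()),
        None => println!("no enclosing workspace found"),
    }
}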
- fn find_root(&mut self, manifest_path: &Path) -> CargoResult> { - fn read_root_pointer(member_manifest: &Path, root_link: &str) -> PathBuf { - let path = member_manifest - .parent() - .unwrap() - .join(root_link) - .join("Cargo.toml"); - debug!("find_root - pointer {}", path.display()); - paths::normalize_path(&path) - } - - { - let current = self.packages.load(manifest_path)?; - match *current.workspace_config() { - WorkspaceConfig::Root(_) => { - debug!("find_root - is root {}", manifest_path.display()); - return Ok(Some(manifest_path.to_path_buf())); - } - WorkspaceConfig::Member { - root: Some(ref path_to_root), - } => return Ok(Some(read_root_pointer(manifest_path, path_to_root))), - WorkspaceConfig::Member { root: None } => {} - } - } - - for path in paths::ancestors(manifest_path, None).skip(2) { - if path.ends_with("target/package") { - break; - } - - let ances_manifest_path = path.join("Cargo.toml"); - debug!("find_root - trying {}", ances_manifest_path.display()); - if ances_manifest_path.exists() { - match *self.packages.load(&ances_manifest_path)?.workspace_config() { - WorkspaceConfig::Root(ref ances_root_config) => { - debug!("find_root - found a root checking exclusion"); - if !ances_root_config.is_excluded(manifest_path) { - debug!("find_root - found!"); - return Ok(Some(ances_manifest_path)); - } - } - WorkspaceConfig::Member { - root: Some(ref path_to_root), - } => { - debug!("find_root - found pointer"); - return Ok(Some(read_root_pointer(&ances_manifest_path, path_to_root))); - } - WorkspaceConfig::Member { .. } => {} - } - } - - // Don't walk across `CARGO_HOME` when we're looking for the - // workspace root. Sometimes a package will be organized with - // `CARGO_HOME` pointing inside of the workspace root or in the - // current package, but we don't want to mistakenly try to put - // crates.io crates into the workspace by accident. - if self.config.home() == path { - break; - } - } - - Ok(None) - } - - /// After the root of a workspace has been located, probes for all members - /// of a workspace. - /// - /// If the `workspace.members` configuration is present, then this just - /// verifies that those are all valid packages to point to. Otherwise, this - /// will transitively follow all `path` dependencies looking for members of - /// the workspace. - fn find_members(&mut self) -> CargoResult<()> { - let workspace_config = match self.load_workspace_config()? { - Some(workspace_config) => workspace_config, - None => { - debug!("find_members - only me as a member"); - self.members.push(self.current_manifest.clone()); - self.default_members.push(self.current_manifest.clone()); - if let Ok(pkg) = self.current() { - let id = pkg.package_id(); - self.member_ids.insert(id); - } - return Ok(()); - } - }; - - // self.root_manifest must be Some to have retrieved workspace_config - let root_manifest_path = self.root_manifest.clone().unwrap(); - - let members_paths = - workspace_config.members_paths(workspace_config.members.as_ref().unwrap_or(&vec![]))?; - let default_members_paths = if root_manifest_path == self.current_manifest { - if let Some(ref default) = workspace_config.default_members { - Some(workspace_config.members_paths(default)?) 
- } else { - None - } - } else { - None - }; - - for path in &members_paths { - self.find_path_deps(&path.join("Cargo.toml"), &root_manifest_path, false) - .with_context(|| { - format!( - "failed to load manifest for workspace member `{}`", - path.display() - ) - })?; - } - - if let Some(default) = default_members_paths { - for path in default { - let normalized_path = paths::normalize_path(&path); - let manifest_path = normalized_path.join("Cargo.toml"); - if !self.members.contains(&manifest_path) { - // default-members are allowed to be excluded, but they - // still must be referred to by the original (unfiltered) - // members list. Note that we aren't testing against the - // manifest path, both because `members_paths` doesn't - // include `/Cargo.toml`, and because excluded paths may not - // be crates. - let exclude = members_paths.contains(&normalized_path) - && workspace_config.is_excluded(&normalized_path); - if exclude { - continue; - } - bail!( - "package `{}` is listed in workspaceโ€™s default-members \ - but is not a member.", - path.display() - ) - } - self.default_members.push(manifest_path) - } - } else if self.is_virtual() { - self.default_members = self.members.clone() - } else { - self.default_members.push(self.current_manifest.clone()) - } - - self.find_path_deps(&root_manifest_path, &root_manifest_path, false) - } - - fn find_path_deps( - &mut self, - manifest_path: &Path, - root_manifest: &Path, - is_path_dep: bool, - ) -> CargoResult<()> { - let manifest_path = paths::normalize_path(manifest_path); - if self.members.contains(&manifest_path) { - return Ok(()); - } - if is_path_dep - && !manifest_path.parent().unwrap().starts_with(self.root()) - && self.find_root(&manifest_path)? != self.root_manifest - { - // If `manifest_path` is a path dependency outside of the workspace, - // don't add it, or any of its dependencies, as a members. - return Ok(()); - } - - if let WorkspaceConfig::Root(ref root_config) = - *self.packages.load(root_manifest)?.workspace_config() - { - if root_config.is_excluded(&manifest_path) { - return Ok(()); - } - } - - debug!("find_members - {}", manifest_path.display()); - self.members.push(manifest_path.clone()); - - let candidates = { - let pkg = match *self.packages.load(&manifest_path)? { - MaybePackage::Package(ref p) => p, - MaybePackage::Virtual(_) => return Ok(()), - }; - self.member_ids.insert(pkg.package_id()); - pkg.dependencies() - .iter() - .map(|d| (d.source_id(), d.package_name())) - .filter(|(s, _)| s.is_path()) - .filter_map(|(s, n)| s.url().to_file_path().ok().map(|p| (p, n))) - .map(|(p, n)| (p.join("Cargo.toml"), n)) - .collect::>() - }; - for (path, name) in candidates { - self.find_path_deps(&path, root_manifest, true) - .with_context(|| format!("failed to load manifest for dependency `{}`", name)) - .map_err(|err| ManifestError::new(err, manifest_path.clone()))?; - } - Ok(()) - } - - /// Returns the unstable nightly-only features enabled via `cargo-features` in the manifest. - pub fn unstable_features(&self) -> &Features { - match self.root_maybe() { - MaybePackage::Package(p) => p.manifest().unstable_features(), - MaybePackage::Virtual(vm) => vm.unstable_features(), - } - } - - pub fn resolve_behavior(&self) -> ResolveBehavior { - self.resolve_behavior - } - - /// Returns `true` if this workspace uses the new CLI features behavior. - /// - /// The old behavior only allowed choosing the features from the package - /// in the current directory, regardless of which packages were chosen - /// with the -p flags. 
The new behavior allows selecting features from the - /// packages chosen on the command line (with -p or --workspace flags), - /// ignoring whatever is in the current directory. - pub fn allows_new_cli_feature_behavior(&self) -> bool { - self.is_virtual() - || match self.resolve_behavior() { - ResolveBehavior::V1 => false, - ResolveBehavior::V2 => true, - } - } - - /// Validates a workspace, ensuring that a number of invariants are upheld: - /// - /// 1. A workspace only has one root. - /// 2. All workspace members agree on this one root as the root. - /// 3. The current crate is a member of this workspace. - fn validate(&mut self) -> CargoResult<()> { - // The rest of the checks require a VirtualManifest or multiple members. - if self.root_manifest.is_none() { - return Ok(()); - } - - self.validate_unique_names()?; - self.validate_workspace_roots()?; - self.validate_members()?; - self.error_if_manifest_not_in_members()?; - self.validate_manifest() - } - - fn validate_unique_names(&self) -> CargoResult<()> { - let mut names = BTreeMap::new(); - for member in self.members.iter() { - let package = self.packages.get(member); - let name = match *package { - MaybePackage::Package(ref p) => p.name(), - MaybePackage::Virtual(_) => continue, - }; - if let Some(prev) = names.insert(name, member) { - bail!( - "two packages named `{}` in this workspace:\n\ - - {}\n\ - - {}", - name, - prev.display(), - member.display() - ); - } - } - Ok(()) - } - - fn validate_workspace_roots(&self) -> CargoResult<()> { - let roots: Vec = self - .members - .iter() - .filter(|&member| { - let config = self.packages.get(member).workspace_config(); - matches!(config, WorkspaceConfig::Root(_)) - }) - .map(|member| member.parent().unwrap().to_path_buf()) - .collect(); - match roots.len() { - 1 => Ok(()), - 0 => bail!( - "`package.workspace` configuration points to a crate \ - which is not configured with [workspace]: \n\ - configuration at: {}\n\ - points to: {}", - self.current_manifest.display(), - self.root_manifest.as_ref().unwrap().display() - ), - _ => { - bail!( - "multiple workspace roots found in the same workspace:\n{}", - roots - .iter() - .map(|r| format!(" {}", r.display())) - .collect::>() - .join("\n") - ); - } - } - } - - fn validate_members(&mut self) -> CargoResult<()> { - for member in self.members.clone() { - let root = self.find_root(&member)?; - if root == self.root_manifest { - continue; - } - - match root { - Some(root) => { - bail!( - "package `{}` is a member of the wrong workspace\n\ - expected: {}\n\ - actual: {}", - member.display(), - self.root_manifest.as_ref().unwrap().display(), - root.display() - ); - } - None => { - bail!( - "workspace member `{}` is not hierarchically below \ - the workspace root `{}`", - member.display(), - self.root_manifest.as_ref().unwrap().display() - ); - } - } - } - Ok(()) - } - - fn error_if_manifest_not_in_members(&mut self) -> CargoResult<()> { - if self.members.contains(&self.current_manifest) { - return Ok(()); - } - - let root = self.root_manifest.as_ref().unwrap(); - let root_dir = root.parent().unwrap(); - let current_dir = self.current_manifest.parent().unwrap(); - let root_pkg = self.packages.get(root); - - // FIXME: Make this more generic by using a relative path resolver between member and root. 
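The hint assembled just after this point leans on `Path::strip_prefix`: when the member lives under the workspace root, suggest its relative path; otherwise fall back to a generic message. A tiny standalone illustration with made-up paths and wording:

use std::path::Path;

/// Suggest what to add to `workspace.members`: the member's path relative
/// to the workspace root when it is inside the root, otherwise a generic hint.
fn members_hint(root_dir: &Path, member_dir: &Path) -> String {
    match member_dir.strip_prefix(root_dir) {
        Ok(rel) => format!("add `{}` to `workspace.members`", rel.display()),
        Err(_) => "add the member to `workspace.members`".to_string(),
    }
}

fn main() {
    let root = Path::new("/work/repo");
    assert_eq!(
        members_hint(root, Path::new("/work/repo/crates/foo")),
        "add `crates/foo` to `workspace.members`"
    );
    assert_eq!(
        members_hint(root, Path::new("/elsewhere/foo")),
        "add the member to `workspace.members`"
    );
}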
- let members_msg = match current_dir.strip_prefix(root_dir) { - Ok(rel) => format!( - "this may be fixable by adding `{}` to the \ - `workspace.members` array of the manifest \ - located at: {}", - rel.display(), - root.display() - ), - Err(_) => format!( - "this may be fixable by adding a member to \ - the `workspace.members` array of the \ - manifest located at: {}", - root.display() - ), - }; - let extra = match *root_pkg { - MaybePackage::Virtual(_) => members_msg, - MaybePackage::Package(ref p) => { - let has_members_list = match *p.manifest().workspace_config() { - WorkspaceConfig::Root(ref root_config) => root_config.has_members_list(), - WorkspaceConfig::Member { .. } => unreachable!(), - }; - if !has_members_list { - format!( - "this may be fixable by ensuring that this \ - crate is depended on by the workspace \ - root: {}", - root.display() - ) - } else { - members_msg - } - } - }; - bail!( - "current package believes it's in a workspace when it's not:\n\ - current: {}\n\ - workspace: {}\n\n{}\n\ - Alternatively, to keep it out of the workspace, add the package \ - to the `workspace.exclude` array, or add an empty `[workspace]` \ - table to the package's manifest.", - self.current_manifest.display(), - root.display(), - extra - ); - } - - fn validate_manifest(&mut self) -> CargoResult<()> { - if let Some(ref root_manifest) = self.root_manifest { - for pkg in self - .members() - .filter(|p| p.manifest_path() != root_manifest) - { - let manifest = pkg.manifest(); - let emit_warning = |what| -> CargoResult<()> { - let msg = format!( - "{} for the non root package will be ignored, \ - specify {} at the workspace root:\n\ - package: {}\n\ - workspace: {}", - what, - what, - pkg.manifest_path().display(), - root_manifest.display(), - ); - self.config.shell().warn(&msg) - }; - if manifest.original().has_profiles() { - emit_warning("profiles")?; - } - if !manifest.replace().is_empty() { - emit_warning("replace")?; - } - if !manifest.patch().is_empty() { - emit_warning("patch")?; - } - if let Some(behavior) = manifest.resolve_behavior() { - if behavior != self.resolve_behavior { - // Only warn if they don't match. - emit_warning("resolver")?; - } - } - } - } - Ok(()) - } - - pub fn load(&self, manifest_path: &Path) -> CargoResult { - match self.packages.maybe_get(manifest_path) { - Some(&MaybePackage::Package(ref p)) => return Ok(p.clone()), - Some(&MaybePackage::Virtual(_)) => bail!("cannot load workspace root"), - None => {} - } - - let mut loaded = self.loaded_packages.borrow_mut(); - if let Some(p) = loaded.get(manifest_path).cloned() { - return Ok(p); - } - let source_id = SourceId::for_path(manifest_path.parent().unwrap())?; - let (package, _nested_paths) = ops::read_package(manifest_path, source_id, self.config)?; - loaded.insert(manifest_path.to_path_buf(), package.clone()); - Ok(package) - } - - /// Preload the provided registry with already loaded packages. - /// - /// A workspace may load packages during construction/parsing/early phases - /// for various operations, and this preload step avoids doubly-loading and - /// parsing crates on the filesystem by inserting them all into the registry - /// with their in-memory formats. - pub fn preload(&self, registry: &mut PackageRegistry<'cfg>) { - // These can get weird as this generally represents a workspace during - // `cargo install`. 
Things like git repositories will actually have a - // `PathSource` with multiple entries in it, so the logic below is - // mostly just an optimization for normal `cargo build` in workspaces - // during development. - if self.is_ephemeral { - return; - } - - for pkg in self.packages.packages.values() { - let pkg = match *pkg { - MaybePackage::Package(ref p) => p.clone(), - MaybePackage::Virtual(_) => continue, - }; - let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), self.config); - src.preload_with(pkg); - registry.add_preloaded(Box::new(src)); - } - } - - pub fn emit_warnings(&self) -> CargoResult<()> { - for (path, maybe_pkg) in &self.packages.packages { - let warnings = match maybe_pkg { - MaybePackage::Package(pkg) => pkg.manifest().warnings().warnings(), - MaybePackage::Virtual(vm) => vm.warnings().warnings(), - }; - let path = path.join("Cargo.toml"); - for warning in warnings { - if warning.is_critical { - let err = anyhow::format_err!("{}", warning.message); - let cx = - anyhow::format_err!("failed to parse manifest at `{}`", path.display()); - return Err(err.context(cx)); - } else { - let msg = if self.root_manifest.is_none() { - warning.message.to_string() - } else { - // In a workspace, it can be confusing where a warning - // originated, so include the path. - format!("{}: {}", path.display(), warning.message) - }; - self.config.shell().warn(msg)? - } - } - } - Ok(()) - } - - pub fn set_target_dir(&mut self, target_dir: Filesystem) { - self.target_dir = Some(target_dir); - } - - /// Returns a Vec of `(&Package, RequestedFeatures)` tuples that - /// represent the workspace members that were requested on the command-line. - /// - /// `specs` may be empty, which indicates it should return all workspace - /// members. In this case, `requested_features.all_features` must be - /// `true`. This is used for generating `Cargo.lock`, which must include - /// all members with all features enabled. - pub fn members_with_features( - &self, - specs: &[PackageIdSpec], - cli_features: &CliFeatures, - ) -> CargoResult> { - assert!( - !specs.is_empty() || cli_features.all_features, - "no specs requires all_features" - ); - if specs.is_empty() { - // When resolving the entire workspace, resolve each member with - // all features enabled. - return Ok(self - .members() - .map(|m| (m, CliFeatures::new_all(true))) - .collect()); - } - if self.allows_new_cli_feature_behavior() { - self.members_with_features_new(specs, cli_features) - } else { - Ok(self.members_with_features_old(specs, cli_features)) - } - } - - /// Returns the requested features for the given member. - /// This filters out any named features that the member does not have. - fn collect_matching_features( - member: &Package, - cli_features: &CliFeatures, - found_features: &mut BTreeSet, - ) -> CliFeatures { - if cli_features.features.is_empty() || cli_features.all_features { - return cli_features.clone(); - } - - // Only include features this member defines. 
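`collect_matching_features` below keeps only the requested features that a member can actually name, either as an explicit `[features]` entry or as an optional dependency, and records what matched for later error reporting. A simplified version of that filter over plain string sets (the `pkg/feat` and weak `?` syntax handled by cargo's `FeatureValue` is omitted):

use std::collections::BTreeSet;

/// Keep only the requested features this member can satisfy, and record
/// them in `found` so unmatched requests can be reported later.
fn matching_features(
    requested: &BTreeSet<String>,
    member_features: &BTreeSet<String>,
    optional_deps: &BTreeSet<String>,
    found: &mut BTreeSet<String>,
) -> BTreeSet<String> {
    let mut kept = BTreeSet::new();
    for feature in requested {
        if member_features.contains(feature) || optional_deps.contains(feature) {
            kept.insert(feature.clone());
            found.insert(feature.clone());
        }
    }
    kept
}

fn main() {
    let requested: BTreeSet<String> = ["serde", "tracing"].iter().map(|s| s.to_string()).collect();
    let features: BTreeSet<String> = ["serde"].iter().map(|s| s.to_string()).collect();
    let optional: BTreeSet<String> = BTreeSet::new();
    let mut found = BTreeSet::new();
    let kept = matching_features(&requested, &features, &optional, &mut found);
    assert!(kept.contains("serde"));
    assert!(!found.contains("tracing")); // left over for the "unknown feature" error path
}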
- let summary = member.summary(); - - // Features defined in the manifest - let summary_features = summary.features(); - - // Dependency name -> dependency - let dependencies: BTreeMap = summary - .dependencies() - .iter() - .map(|dep| (dep.name_in_toml(), dep)) - .collect(); - - // Features that enable optional dependencies - let optional_dependency_names: BTreeSet<_> = dependencies - .iter() - .filter(|(_, dep)| dep.is_optional()) - .map(|(name, _)| name) - .copied() - .collect(); - - let mut features = BTreeSet::new(); - - // Checks if a member contains the given feature. - let summary_or_opt_dependency_feature = |feature: &InternedString| -> bool { - summary_features.contains_key(feature) || optional_dependency_names.contains(feature) - }; - - for feature in cli_features.features.iter() { - match feature { - FeatureValue::Feature(f) => { - if summary_or_opt_dependency_feature(f) { - // feature exists in this member. - features.insert(feature.clone()); - found_features.insert(feature.clone()); - } - } - // This should be enforced by CliFeatures. - FeatureValue::Dep { .. } => panic!("unexpected dep: syntax {}", feature), - FeatureValue::DepFeature { - dep_name, - dep_feature, - weak: _, - } => { - if dependencies.contains_key(dep_name) { - // pkg/feat for a dependency. - // Will rely on the dependency resolver to validate `dep_feature`. - features.insert(feature.clone()); - found_features.insert(feature.clone()); - } else if *dep_name == member.name() - && summary_or_opt_dependency_feature(dep_feature) - { - // member/feat where "feat" is a feature in member. - // - // `weak` can be ignored here, because the member - // either is or isn't being built. - features.insert(FeatureValue::Feature(*dep_feature)); - found_features.insert(feature.clone()); - } - } - } - } - CliFeatures { - features: Rc::new(features), - all_features: false, - uses_default_features: cli_features.uses_default_features, - } - } - - fn report_unknown_features_error( - &self, - specs: &[PackageIdSpec], - cli_features: &CliFeatures, - found_features: &BTreeSet, - ) -> CargoResult<()> { - // Keeps track of which features were contained in summary of `member` to suggest similar features in errors - let mut summary_features: Vec = Default::default(); - - // Keeps track of `member` dependencies (`dep/feature`) and their features names to suggest similar features in error - let mut dependencies_features: BTreeMap = - Default::default(); - - // Keeps track of `member` optional dependencies names (which can be enabled with feature) to suggest similar features in error - let mut optional_dependency_names: Vec = Default::default(); - - // Keeps track of which features were contained in summary of `member` to suggest similar features in errors - let mut summary_features_per_member: BTreeMap<&Package, BTreeSet> = - Default::default(); - - // Keeps track of `member` optional dependencies (which can be enabled with feature) to suggest similar features in error - let mut optional_dependency_names_per_member: BTreeMap<&Package, BTreeSet> = - Default::default(); - - for member in self - .members() - .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))) - { - // Only include features this member defines. 
- let summary = member.summary(); - - // Features defined in the manifest - summary_features.extend(summary.features().keys()); - summary_features_per_member - .insert(member, summary.features().keys().copied().collect()); - - // Dependency name -> dependency - let dependencies: BTreeMap = summary - .dependencies() - .iter() - .map(|dep| (dep.name_in_toml(), dep)) - .collect(); - - dependencies_features.extend( - dependencies - .iter() - .map(|(name, dep)| (*name, dep.features())), - ); - - // Features that enable optional dependencies - let optional_dependency_names_raw: BTreeSet<_> = dependencies - .iter() - .filter(|(_, dep)| dep.is_optional()) - .map(|(name, _)| name) - .copied() - .collect(); - - optional_dependency_names.extend(optional_dependency_names_raw.iter()); - optional_dependency_names_per_member.insert(member, optional_dependency_names_raw); - } - - let levenshtein_test = - |a: InternedString, b: InternedString| lev_distance(a.as_str(), b.as_str()) < 4; - - let suggestions: Vec<_> = cli_features - .features - .difference(found_features) - .map(|feature| match feature { - // Simple feature, check if any of the optional dependency features or member features are close enough - FeatureValue::Feature(typo) => { - // Finds member features which are similar to the requested feature. - let summary_features = summary_features - .iter() - .filter(move |feature| levenshtein_test(**feature, *typo)); - - // Finds optional dependencies which name is similar to the feature - let optional_dependency_features = optional_dependency_names - .iter() - .filter(move |feature| levenshtein_test(**feature, *typo)); - - summary_features - .chain(optional_dependency_features) - .map(|s| s.to_string()) - .collect::>() - } - FeatureValue::Dep { .. } => panic!("unexpected dep: syntax {}", feature), - FeatureValue::DepFeature { - dep_name, - dep_feature, - weak: _, - } => { - // Finds set of `pkg/feat` that are very similar to current `pkg/feat`. - let pkg_feat_similar = dependencies_features - .iter() - .filter(|(name, _)| levenshtein_test(**name, *dep_name)) - .map(|(name, features)| { - ( - name, - features - .iter() - .filter(|feature| levenshtein_test(**feature, *dep_feature)) - .collect::>(), - ) - }) - .map(|(name, features)| { - features - .into_iter() - .map(move |feature| format!("{}/{}", name, feature)) - }) - .flatten(); - - // Finds set of `member/optional_dep` features which name is similar to current `pkg/feat`. - let optional_dependency_features = optional_dependency_names_per_member - .iter() - .filter(|(package, _)| levenshtein_test(package.name(), *dep_name)) - .map(|(package, optional_dependencies)| { - optional_dependencies - .into_iter() - .filter(|optional_dependency| { - levenshtein_test(**optional_dependency, *dep_name) - }) - .map(move |optional_dependency| { - format!("{}/{}", package.name(), optional_dependency) - }) - }) - .flatten(); - - // Finds set of `member/feat` features which name is similar to current `pkg/feat`. 
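The suggestion pipeline here reduces to: measure the edit distance between each unmatched feature and every name that could have been meant, keep the close matches, drop anything already requested, and show a few of the best. A compact sketch with a textbook Levenshtein distance (the deleted code uses cargo's `lev_distance` helper with the same `< 4` threshold, plus sorting and a limit of five):

/// Classic dynamic-programming Levenshtein edit distance.
fn levenshtein(a: &str, b: &str) -> usize {
    let a: Vec<char> = a.chars().collect();
    let b: Vec<char> = b.chars().collect();
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.iter().enumerate() {
        let mut cur = vec![i + 1; b.len() + 1];
        for (j, cb) in b.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            cur[j + 1] = (prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1);
        }
        prev = cur;
    }
    prev[b.len()]
}

/// Suggest up to `limit` known names close to `typo`, skipping exact requests.
fn suggestions(typo: &str, known: &[&str], limit: usize) -> Vec<String> {
    let mut close: Vec<&str> = known
        .iter()
        .copied()
        .filter(|name| *name != typo && levenshtein(name, typo) < 4)
        .collect();
    close.sort();
    close.dedup();
    close.into_iter().take(limit).map(str::to_string).collect()
}

fn main() {
    let known = ["serde", "serde_json", "derive", "default"];
    assert_eq!(suggestions("serd", &known, 5), vec!["serde".to_string()]);
}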
- let summary_features = summary_features_per_member - .iter() - .filter(|(package, _)| levenshtein_test(package.name(), *dep_name)) - .map(|(package, summary_features)| { - summary_features - .into_iter() - .filter(|summary_feature| { - levenshtein_test(**summary_feature, *dep_feature) - }) - .map(move |summary_feature| { - format!("{}/{}", package.name(), summary_feature) - }) - }) - .flatten(); - - pkg_feat_similar - .chain(optional_dependency_features) - .chain(summary_features) - .collect::>() - } - }) - .map(|v| v.into_iter()) - .flatten() - .unique() - .filter(|element| { - let feature = FeatureValue::new(InternedString::new(element)); - !cli_features.features.contains(&feature) && !found_features.contains(&feature) - }) - .sorted() - .take(5) - .collect(); - - let unknown: Vec<_> = cli_features - .features - .difference(found_features) - .map(|feature| feature.to_string()) - .sorted() - .collect(); - - if suggestions.is_empty() { - bail!( - "none of the selected packages contains these features: {}", - unknown.join(", ") - ); - } else { - bail!( - "none of the selected packages contains these features: {}, did you mean: {}?", - unknown.join(", "), - suggestions.join(", ") - ); - } - } - - /// New command-line feature selection behavior with resolver = "2" or the - /// root of a virtual workspace. See `allows_new_cli_feature_behavior`. - fn members_with_features_new( - &self, - specs: &[PackageIdSpec], - cli_features: &CliFeatures, - ) -> CargoResult> { - // Keeps track of which features matched `member` to produce an error - // if any of them did not match anywhere. - let mut found_features = Default::default(); - - let members: Vec<(&Package, CliFeatures)> = self - .members() - .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))) - .map(|m| { - ( - m, - Workspace::collect_matching_features(m, cli_features, &mut found_features), - ) - }) - .collect(); - - if members.is_empty() { - // `cargo build -p foo`, where `foo` is not a member. - // Do not allow any command-line flags (defaults only). - if !(cli_features.features.is_empty() - && !cli_features.all_features - && cli_features.uses_default_features) - { - bail!("cannot specify features for packages outside of workspace"); - } - // Add all members from the workspace so we can ensure `-p nonmember` - // is in the resolve graph. - return Ok(self - .members() - .map(|m| (m, CliFeatures::new_all(false))) - .collect()); - } - if *cli_features.features != found_features { - self.report_unknown_features_error(specs, cli_features, &found_features)?; - } - Ok(members) - } - - /// This is the "old" behavior for command-line feature selection. - /// See `allows_new_cli_feature_behavior`. - fn members_with_features_old( - &self, - specs: &[PackageIdSpec], - cli_features: &CliFeatures, - ) -> Vec<(&Package, CliFeatures)> { - // Split off any features with the syntax `member-name/feature-name` into a map - // so that those features can be applied directly to those workspace-members. - let mut member_specific_features: HashMap> = - HashMap::new(); - // Features for the member in the current directory. - let mut cwd_features = BTreeSet::new(); - for feature in cli_features.features.iter() { - match feature { - FeatureValue::Feature(_) => { - cwd_features.insert(feature.clone()); - } - // This should be enforced by CliFeatures. - FeatureValue::Dep { .. } => panic!("unexpected dep: syntax {}", feature), - FeatureValue::DepFeature { - dep_name, - dep_feature, - weak: _, - } => { - // I think weak can be ignored here. 
- // * With `--features member?/feat -p member`, the ? doesn't - // really mean anything (either the member is built or it isn't). - // * With `--features nonmember?/feat`, cwd_features will - // handle processing it correctly. - let is_member = self.members().any(|member| { - // Check if `dep_name` is member of the workspace, but isn't associated with current package. - self.current_opt() != Some(member) && member.name() == *dep_name - }); - if is_member && specs.iter().any(|spec| spec.name() == *dep_name) { - member_specific_features - .entry(*dep_name) - .or_default() - .insert(FeatureValue::Feature(*dep_feature)); - } else { - cwd_features.insert(feature.clone()); - } - } - } - } - - let ms: Vec<_> = self - .members() - .filter_map(|member| { - let member_id = member.package_id(); - match self.current_opt() { - // The features passed on the command-line only apply to - // the "current" package (determined by the cwd). - Some(current) if member_id == current.package_id() => { - let feats = CliFeatures { - features: Rc::new(cwd_features.clone()), - all_features: cli_features.all_features, - uses_default_features: cli_features.uses_default_features, - }; - Some((member, feats)) - } - _ => { - // Ignore members that are not enabled on the command-line. - if specs.iter().any(|spec| spec.matches(member_id)) { - // -p for a workspace member that is not the "current" - // one. - // - // The odd behavior here is due to backwards - // compatibility. `--features` and - // `--no-default-features` used to only apply to the - // "current" package. As an extension, this allows - // member-name/feature-name to set member-specific - // features, which should be backwards-compatible. - let feats = CliFeatures { - features: Rc::new( - member_specific_features - .remove(member.name().as_str()) - .unwrap_or_default(), - ), - uses_default_features: true, - all_features: cli_features.all_features, - }; - Some((member, feats)) - } else { - // This member was not requested on the command-line, skip. - None - } - } - } - }) - .collect(); - - // If any member specific features were not removed while iterating over members - // some features will be ignored. 
- assert!(member_specific_features.is_empty()); - - ms - } -} - -impl<'cfg> Packages<'cfg> { - fn get(&self, manifest_path: &Path) -> &MaybePackage { - self.maybe_get(manifest_path).unwrap() - } - - fn get_mut(&mut self, manifest_path: &Path) -> &mut MaybePackage { - self.maybe_get_mut(manifest_path).unwrap() - } - - fn maybe_get(&self, manifest_path: &Path) -> Option<&MaybePackage> { - self.packages.get(manifest_path.parent().unwrap()) - } - - fn maybe_get_mut(&mut self, manifest_path: &Path) -> Option<&mut MaybePackage> { - self.packages.get_mut(manifest_path.parent().unwrap()) - } - - fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> { - let key = manifest_path.parent().unwrap(); - match self.packages.entry(key.to_path_buf()) { - Entry::Occupied(e) => Ok(e.into_mut()), - Entry::Vacant(v) => { - let source_id = SourceId::for_path(key)?; - let (manifest, _nested_paths) = - read_manifest(manifest_path, source_id, self.config)?; - Ok(v.insert(match manifest { - EitherManifest::Real(manifest) => { - MaybePackage::Package(Package::new(manifest, manifest_path)) - } - EitherManifest::Virtual(vm) => MaybePackage::Virtual(vm), - })) - } - } - } -} - -impl MaybePackage { - fn workspace_config(&self) -> &WorkspaceConfig { - match *self { - MaybePackage::Package(ref p) => p.manifest().workspace_config(), - MaybePackage::Virtual(ref vm) => vm.workspace_config(), - } - } -} - -impl WorkspaceRootConfig { - /// Creates a new Intermediate Workspace Root configuration. - pub fn new( - root_dir: &Path, - members: &Option>, - default_members: &Option>, - exclude: &Option>, - custom_metadata: &Option, - ) -> WorkspaceRootConfig { - WorkspaceRootConfig { - root_dir: root_dir.to_path_buf(), - members: members.clone(), - default_members: default_members.clone(), - exclude: exclude.clone().unwrap_or_default(), - custom_metadata: custom_metadata.clone(), - } - } - - /// Checks the path against the `excluded` list. - /// - /// This method does **not** consider the `members` list. - fn is_excluded(&self, manifest_path: &Path) -> bool { - let excluded = self - .exclude - .iter() - .any(|ex| manifest_path.starts_with(self.root_dir.join(ex))); - - let explicit_member = match self.members { - Some(ref members) => members - .iter() - .any(|mem| manifest_path.starts_with(self.root_dir.join(mem))), - None => false, - }; - - !explicit_member && excluded - } - - fn has_members_list(&self) -> bool { - self.members.is_some() - } - - fn members_paths(&self, globs: &[String]) -> CargoResult> { - let mut expanded_list = Vec::new(); - - for glob in globs { - let pathbuf = self.root_dir.join(glob); - let expanded_paths = Self::expand_member_path(&pathbuf)?; - - // If glob does not find any valid paths, then put the original - // path in the expanded list to maintain backwards compatibility. - if expanded_paths.is_empty() { - expanded_list.push(pathbuf); - } else { - // Some OS can create system support files anywhere. - // (e.g. macOS creates `.DS_Store` file if you visit a directory using Finder.) - // Such files can be reported as a member path unexpectedly. - // Check and filter out non-directory paths to prevent pushing such accidental unwanted path - // as a member. 
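The `members_paths` expansion below relies on the `glob` crate, as does this sketch; the `expand_member` helper and the example entry are illustrative. It mirrors the two decisions just described: keep the literal path when the pattern matches nothing, and keep only directories when it does:

use std::path::{Path, PathBuf};

/// Expand one `workspace.members` entry rooted at `root`.
fn expand_member(root: &Path, entry: &str) -> Result<Vec<PathBuf>, glob::PatternError> {
    let pattern = root.join(entry);
    let pattern_str = pattern.to_string_lossy();
    let expanded: Vec<PathBuf> = glob::glob(&pattern_str)?
        .filter_map(Result::ok)
        .collect();
    Ok(if expanded.is_empty() {
        // No match: keep the literal path for backwards compatibility.
        vec![pattern]
    } else {
        // Drop stray files such as `.DS_Store`; only directories can be members.
        expanded.into_iter().filter(|p| p.is_dir()).collect()
    })
}

fn main() {
    match expand_member(Path::new("."), "crates/*") {
        Ok(paths) => {
            for p in paths {
                println!("member candidate: {}", p.display());
            }
        }
        Err(e) => eprintln!("bad pattern: {}", e),
    }
}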
- for expanded_path in expanded_paths { - if expanded_path.is_dir() { - expanded_list.push(expanded_path); - } - } - } - } - - Ok(expanded_list) - } - - fn expand_member_path(path: &Path) -> CargoResult> { - let path = match path.to_str() { - Some(p) => p, - None => return Ok(Vec::new()), - }; - let res = glob(path).with_context(|| format!("could not parse pattern `{}`", &path))?; - let res = res - .map(|p| p.with_context(|| format!("unable to match path to pattern `{}`", &path))) - .collect::, _>>()?; - Ok(res) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/lib.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/lib.rs deleted file mode 100644 index e4178fd16..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/lib.rs +++ /dev/null @@ -1,103 +0,0 @@ -// For various reasons, some idioms are still allow'ed, but we would like to -// test and enforce them. -#![warn(rust_2018_idioms)] -#![cfg_attr(test, deny(warnings))] -// Due to some of the default clippy lints being somewhat subjective and not -// necessarily an improvement, we prefer to not use them at this time. -#![allow(clippy::all)] -#![warn(clippy::needless_borrow)] -#![warn(clippy::redundant_clone)] - -use crate::core::shell::Verbosity::Verbose; -use crate::core::Shell; -use anyhow::Error; -use log::debug; - -pub use crate::util::errors::{InternalError, VerboseError}; -pub use crate::util::{indented_lines, CargoResult, CliError, CliResult, Config}; -pub use crate::version::version; - -pub const CARGO_ENV: &str = "CARGO"; - -#[macro_use] -mod macros; - -pub mod core; -pub mod ops; -pub mod sources; -pub mod util; -mod version; - -pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! { - debug!("exit_with_error; err={:?}", err); - if let Some(ref err) = err.error { - if let Some(clap_err) = err.downcast_ref::() { - clap_err.exit() - } - } - - let CliError { error, exit_code } = err; - if let Some(error) = error { - display_error(&error, shell); - } - - std::process::exit(exit_code) -} - -/// Displays an error, and all its causes, to stderr. -pub fn display_error(err: &Error, shell: &mut Shell) { - debug!("display_error; err={:?}", err); - _display_error(err, shell, true); - if err - .chain() - .any(|e| e.downcast_ref::().is_some()) - { - drop(shell.note("this is an unexpected cargo internal error")); - drop( - shell.note( - "we would appreciate a bug report: https://github.com/rust-lang/cargo/issues/", - ), - ); - drop(shell.note(format!("cargo {}", version()))); - // Once backtraces are stabilized, this should print out a backtrace - // if it is available. - } -} - -/// Displays a warning, with an error object providing detailed information -/// and context. -pub fn display_warning_with_error(warning: &str, err: &Error, shell: &mut Shell) { - drop(shell.warn(warning)); - drop(writeln!(shell.err())); - _display_error(err, shell, false); -} - -fn _display_error(err: &Error, shell: &mut Shell, as_err: bool) -> bool { - let verbosity = shell.verbosity(); - let is_verbose = |e: &(dyn std::error::Error + 'static)| -> bool { - verbosity != Verbose && e.downcast_ref::().is_some() - }; - // Generally the top error shouldn't be verbose, but check it anyways. - if is_verbose(err.as_ref()) { - return true; - } - if as_err { - drop(shell.error(&err)); - } else { - drop(writeln!(shell.err(), "{}", err)); - } - for cause in err.chain().skip(1) { - // If we're not in verbose mode then print remaining errors until one - // marked as `VerboseError` appears. 
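The chain walk that `_display_error` is performing here is what produces cargo's `Caused by:` output. A standalone sketch of the same idea, assuming the `anyhow` crate (which the deleted code also builds on) and leaving out the verbose-error filtering:

use anyhow::{anyhow, Context, Error};

/// Print an error followed by each cause in its chain, with the causes indented.
fn print_error_chain(err: &Error) {
    eprintln!("error: {}", err);
    for cause in err.chain().skip(1) {
        eprintln!("\nCaused by:");
        // Indent every line of the cause by two spaces.
        for line in cause.to_string().lines() {
            eprintln!("  {}", line);
        }
    }
}

fn main() {
    let err: Error = Err::<(), _>(anyhow!("disk full"))
        .context("failed to write lock file")
        .context("failed to update registry")
        .unwrap_err();
    print_error_chain(&err);
}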
- if is_verbose(cause) { - return true; - } - drop(writeln!(shell.err(), "\nCaused by:")); - drop(write!( - shell.err(), - "{}", - indented_lines(&cause.to_string()) - )); - } - false -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/macros.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/macros.rs deleted file mode 100644 index 3ebf3b37f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/macros.rs +++ /dev/null @@ -1,49 +0,0 @@ -use std::fmt; - -macro_rules! compact_debug { - ( - impl fmt::Debug for $ty:ident { - fn fmt(&$this:ident, f: &mut fmt::Formatter) -> fmt::Result { - let (default, default_name) = $e:expr; - [debug_the_fields($($field:ident)*)] - } - } - ) => ( - - impl fmt::Debug for $ty { - fn fmt(&$this, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // Try printing a pretty version where we collapse as many fields as - // possible, indicating that they're equivalent to a function call - // that's hopefully enough to indicate what each value is without - // actually dumping everything so verbosely. - let mut s = f.debug_struct(stringify!($ty)); - let (default, default_name) = $e; - let mut any_default = false; - - // Exhaustively match so when fields are added we get a compile - // failure - let $ty { $($field),* } = $this; - $( - if *$field == default.$field { - any_default = true; - } else { - s.field(stringify!($field), $field); - } - )* - - if any_default { - s.field("..", &crate::macros::DisplayAsDebug(default_name)); - } - s.finish() - } - } - ) -} - -pub struct DisplayAsDebug(pub T); - -impl fmt::Debug for DisplayAsDebug { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&self.0, f) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_clean.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_clean.rs deleted file mode 100644 index 1320efac3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_clean.rs +++ /dev/null @@ -1,248 +0,0 @@ -use crate::core::compiler::{CompileKind, CompileMode, Layout, RustcTargetData}; -use crate::core::profiles::Profiles; -use crate::core::{PackageIdSpec, TargetKind, Workspace}; -use crate::ops; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::lev_distance; -use crate::util::Config; - -use anyhow::Context as _; -use cargo_util::paths; -use std::fs; -use std::path::Path; - -pub struct CleanOptions<'a> { - pub config: &'a Config, - /// A list of packages to clean. If empty, everything is cleaned. - pub spec: Vec, - /// The target arch triple to clean, or None for the host arch - pub targets: Vec, - /// Whether to clean the release directory - pub profile_specified: bool, - /// Whether to clean the directory of a certain build profile - pub requested_profile: InternedString, - /// Whether to just clean the doc directory - pub doc: bool, -} - -/// Cleans the package's build artifacts. -pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { - let mut target_dir = ws.target_dir(); - let config = ws.config(); - - // If the doc option is set, we just want to delete the doc directory. - if opts.doc { - target_dir = target_dir.join("doc"); - return rm_rf(&target_dir.into_path_unlocked(), config); - } - - let profiles = Profiles::new(ws, opts.requested_profile)?; - - if opts.profile_specified { - // After parsing profiles we know the dir-name of the profile, if a profile - // was passed from the command line. 
If so, delete only the directory of - // that profile. - let dir_name = profiles.get_dir_name(); - target_dir = target_dir.join(dir_name); - } - - // If we have a spec, then we need to delete some packages, otherwise, just - // remove the whole target directory and be done with it! - // - // Note that we don't bother grabbing a lock here as we're just going to - // blow it all away anyway. - if opts.spec.is_empty() { - return rm_rf(&target_dir.into_path_unlocked(), config); - } - - // Clean specific packages. - let requested_kinds = CompileKind::from_requested_targets(config, &opts.targets)?; - let target_data = RustcTargetData::new(ws, &requested_kinds)?; - let (pkg_set, resolve) = ops::resolve_ws(ws)?; - let prof_dir_name = profiles.get_dir_name(); - let host_layout = Layout::new(ws, None, &prof_dir_name)?; - // Convert requested kinds to a Vec of layouts. - let target_layouts: Vec<(CompileKind, Layout)> = requested_kinds - .into_iter() - .filter_map(|kind| match kind { - CompileKind::Target(target) => match Layout::new(ws, Some(target), &prof_dir_name) { - Ok(layout) => Some(Ok((kind, layout))), - Err(e) => Some(Err(e)), - }, - CompileKind::Host => None, - }) - .collect::>()?; - // A Vec of layouts. This is a little convoluted because there can only be - // one host_layout. - let layouts = if opts.targets.is_empty() { - vec![(CompileKind::Host, &host_layout)] - } else { - target_layouts - .iter() - .map(|(kind, layout)| (*kind, layout)) - .collect() - }; - // Create a Vec that also includes the host for things that need to clean both. - let layouts_with_host: Vec<(CompileKind, &Layout)> = - std::iter::once((CompileKind::Host, &host_layout)) - .chain(layouts.iter().map(|(k, l)| (*k, *l))) - .collect(); - - // Cleaning individual rustdoc crates is currently not supported. - // For example, the search index would need to be rebuilt to fully - // remove it (otherwise you're left with lots of broken links). - // Doc tests produce no output. - - // Get Packages for the specified specs. - let mut pkg_ids = Vec::new(); - for spec_str in opts.spec.iter() { - // Translate the spec to a Package. - let spec = PackageIdSpec::parse(spec_str)?; - if spec.version().is_some() { - config.shell().warn(&format!( - "version qualifier in `-p {}` is ignored, \ - cleaning all versions of `{}` found", - spec_str, - spec.name() - ))?; - } - if spec.url().is_some() { - config.shell().warn(&format!( - "url qualifier in `-p {}` ignored, \ - cleaning all versions of `{}` found", - spec_str, - spec.name() - ))?; - } - let matches: Vec<_> = resolve.iter().filter(|id| spec.matches(*id)).collect(); - if matches.is_empty() { - let mut suggestion = String::new(); - suggestion.push_str(&lev_distance::closest_msg( - &spec.name(), - resolve.iter(), - |id| id.name().as_str(), - )); - anyhow::bail!( - "package ID specification `{}` did not match any packages{}", - spec, - suggestion - ); - } - pkg_ids.extend(matches); - } - let packages = pkg_set.get_many(pkg_ids)?; - - for pkg in packages { - let pkg_dir = format!("{}-*", pkg.name()); - - // Clean fingerprints. - for (_, layout) in &layouts_with_host { - let dir = escape_glob_path(layout.fingerprint())?; - rm_rf_glob(&Path::new(&dir).join(&pkg_dir), config)?; - } - - for target in pkg.targets() { - if target.is_custom_build() { - // Get both the build_script_build and the output directory. 
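The patterns being built around this point always have the same shape: a glob-escaped directory joined with a `<package>-*` component, so that metacharacters in the directory itself stay literal. A small sketch of that construction, assuming the `glob` crate (which the deleted code uses); the helper name and example paths are made up:

use std::path::{Path, PathBuf};

/// Build a glob that matches `<dir>/<package>-*` while treating any glob
/// metacharacters inside `dir` itself as literal text.
fn package_artifact_glob(dir: &Path, package: &str) -> Option<PathBuf> {
    let dir = dir.to_str()?; // the real code reports an error for non-UTF-8 paths
    let escaped = glob::Pattern::escape(dir);
    Some(Path::new(&escaped).join(format!("{}-*", package)))
}

fn main() {
    let pattern = package_artifact_glob(Path::new("target/debug/.fingerprint"), "serde")
        .expect("utf-8 path");
    for entry in glob::glob(pattern.to_str().expect("utf-8 pattern")).expect("valid pattern") {
        if let Ok(path) = entry {
            println!("would remove {}", path.display());
        }
    }
}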
- for (_, layout) in &layouts_with_host { - let dir = escape_glob_path(layout.build())?; - rm_rf_glob(&Path::new(&dir).join(&pkg_dir), config)?; - } - continue; - } - let crate_name = target.crate_name(); - for &mode in &[ - CompileMode::Build, - CompileMode::Test, - CompileMode::Check { test: false }, - ] { - for (compile_kind, layout) in &layouts { - let triple = target_data.short_name(compile_kind); - - let (file_types, _unsupported) = target_data - .info(*compile_kind) - .rustc_outputs(mode, target.kind(), triple)?; - let (dir, uplift_dir) = match target.kind() { - TargetKind::ExampleBin | TargetKind::ExampleLib(..) => { - (layout.examples(), Some(layout.examples())) - } - // Tests/benchmarks are never uplifted. - TargetKind::Test | TargetKind::Bench => (layout.deps(), None), - _ => (layout.deps(), Some(layout.dest())), - }; - for file_type in file_types { - // Some files include a hash in the filename, some don't. - let hashed_name = file_type.output_filename(target, Some("*")); - let unhashed_name = file_type.output_filename(target, None); - let dir_glob = escape_glob_path(dir)?; - let dir_glob = Path::new(&dir_glob); - - rm_rf_glob(&dir_glob.join(&hashed_name), config)?; - rm_rf(&dir.join(&unhashed_name), config)?; - // Remove dep-info file generated by rustc. It is not tracked in - // file_types. It does not have a prefix. - let hashed_dep_info = dir_glob.join(format!("{}-*.d", crate_name)); - rm_rf_glob(&hashed_dep_info, config)?; - let unhashed_dep_info = dir.join(format!("{}.d", crate_name)); - rm_rf(&unhashed_dep_info, config)?; - // Remove split-debuginfo files generated by rustc. - let split_debuginfo_obj = dir_glob.join(format!("{}.*.o", crate_name)); - rm_rf_glob(&split_debuginfo_obj, config)?; - let split_debuginfo_dwo = dir_glob.join(format!("{}.*.dwo", crate_name)); - rm_rf_glob(&split_debuginfo_dwo, config)?; - - // Remove the uplifted copy. - if let Some(uplift_dir) = uplift_dir { - let uplifted_path = uplift_dir.join(file_type.uplift_filename(target)); - rm_rf(&uplifted_path, config)?; - // Dep-info generated by Cargo itself. - let dep_info = uplifted_path.with_extension("d"); - rm_rf(&dep_info, config)?; - } - } - // TODO: what to do about build_script_build? - let dir = escape_glob_path(layout.incremental())?; - let incremental = Path::new(&dir).join(format!("{}-*", crate_name)); - rm_rf_glob(&incremental, config)?; - } - } - } - } - - Ok(()) -} - -fn escape_glob_path(pattern: &Path) -> CargoResult { - let pattern = pattern - .to_str() - .ok_or_else(|| anyhow::anyhow!("expected utf-8 path"))?; - Ok(glob::Pattern::escape(pattern)) -} - -fn rm_rf_glob(pattern: &Path, config: &Config) -> CargoResult<()> { - // TODO: Display utf8 warning to user? Or switch to globset? - let pattern = pattern - .to_str() - .ok_or_else(|| anyhow::anyhow!("expected utf-8 path"))?; - for path in glob::glob(pattern)? 
{ - rm_rf(&path?, config)?; - } - Ok(()) -} - -fn rm_rf(path: &Path, config: &Config) -> CargoResult<()> { - let m = fs::symlink_metadata(path); - if m.as_ref().map(|s| s.is_dir()).unwrap_or(false) { - config - .shell() - .verbose(|shell| shell.status("Removing", path.display()))?; - paths::remove_dir_all(path).with_context(|| "could not remove build directory")?; - } else if m.is_ok() { - config - .shell() - .verbose(|shell| shell.status("Removing", path.display()))?; - paths::remove_file(path).with_context(|| "failed to remove build artifact")?; - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_compile.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_compile.rs deleted file mode 100644 index fc78dbfc6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_compile.rs +++ /dev/null @@ -1,1823 +0,0 @@ -//! The Cargo "compile" operation. -//! -//! This module contains the entry point for starting the compilation process -//! for commands like `build`, `test`, `doc`, `rustc`, etc. -//! -//! The `compile` function will do all the work to compile a workspace. A -//! rough outline is: -//! -//! - Resolve the dependency graph (see `ops::resolve`). -//! - Download any packages needed (see `PackageSet`). -//! - Generate a list of top-level "units" of work for the targets the user -//! requested on the command-line. Each `Unit` corresponds to a compiler -//! invocation. This is done in this module (`generate_targets`). -//! - Build the graph of `Unit` dependencies (see -//! `core::compiler::context::unit_dependencies`). -//! - Create a `Context` which will perform the following steps: -//! - Prepare the `target` directory (see `Layout`). -//! - Create a job queue (see `JobQueue`). The queue checks the -//! fingerprint of each `Unit` to determine if it should run or be -//! skipped. -//! - Execute the queue. Each leaf in the queue's dependency graph is -//! executed, and then removed from the graph when finished. This -//! repeats until the queue is empty. - -use std::collections::{BTreeSet, HashMap, HashSet}; -use std::hash::{Hash, Hasher}; -use std::sync::Arc; - -use crate::core::compiler::unit_dependencies::build_unit_dependencies; -use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph}; -use crate::core::compiler::{standard_lib, TargetInfo}; -use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context}; -use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit}; -use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner}; -use crate::core::profiles::{Profiles, UnitFor}; -use crate::core::resolver::features::{self, CliFeatures, FeaturesFor}; -use crate::core::resolver::{HasDevUnits, Resolve}; -use crate::core::{FeatureValue, Package, PackageSet, Shell, Summary, Target}; -use crate::core::{PackageId, PackageIdSpec, SourceId, TargetKind, Workspace}; -use crate::drop_println; -use crate::ops; -use crate::ops::resolve::WorkspaceResolve; -use crate::util::config::Config; -use crate::util::interning::InternedString; -use crate::util::restricted_names::is_glob_pattern; -use crate::util::{closest_msg, profile, CargoResult, StableHasher}; - -use anyhow::{bail, Context as _}; - -/// Contains information about how a package should be compiled. -/// -/// Note on distinction between `CompileOptions` and `BuildConfig`: -/// `BuildConfig` contains values that need to be retained after -/// `BuildContext` is created. The other fields are no longer necessary. 
Think -/// of it as `CompileOptions` are high-level settings requested on the -/// command-line, and `BuildConfig` are low-level settings for actually -/// driving `rustc`. -#[derive(Debug)] -pub struct CompileOptions { - /// Configuration information for a rustc build - pub build_config: BuildConfig, - /// Feature flags requested by the user. - pub cli_features: CliFeatures, - /// A set of packages to build. - pub spec: Packages, - /// Filter to apply to the root package to select which targets will be - /// built. - pub filter: CompileFilter, - /// Extra arguments to be passed to rustdoc (single target only) - pub target_rustdoc_args: Option>, - /// The specified target will be compiled with all the available arguments, - /// note that this only accounts for the *final* invocation of rustc - pub target_rustc_args: Option>, - /// Crate types to be passed to rustc (single target only) - pub target_rustc_crate_types: Option>, - /// Extra arguments passed to all selected targets for rustdoc. - pub local_rustdoc_args: Option>, - /// Whether the `--document-private-items` flags was specified and should - /// be forwarded to `rustdoc`. - pub rustdoc_document_private_items: bool, - /// Whether the build process should check the minimum Rust version - /// defined in the cargo metadata for a crate. - pub honor_rust_version: bool, -} - -impl CompileOptions { - pub fn new(config: &Config, mode: CompileMode) -> CargoResult { - Ok(CompileOptions { - build_config: BuildConfig::new(config, None, &[], mode)?, - cli_features: CliFeatures::new_all(false), - spec: ops::Packages::Packages(Vec::new()), - filter: CompileFilter::Default { - required_features_filterable: false, - }, - target_rustdoc_args: None, - target_rustc_args: None, - target_rustc_crate_types: None, - local_rustdoc_args: None, - rustdoc_document_private_items: false, - honor_rust_version: true, - }) - } -} - -#[derive(PartialEq, Eq, Debug)] -pub enum Packages { - Default, - All, - OptOut(Vec), - Packages(Vec), -} - -impl Packages { - pub fn from_flags(all: bool, exclude: Vec, package: Vec) -> CargoResult { - Ok(match (all, exclude.len(), package.len()) { - (false, 0, 0) => Packages::Default, - (false, 0, _) => Packages::Packages(package), - (false, _, _) => anyhow::bail!("--exclude can only be used together with --workspace"), - (true, 0, _) => Packages::All, - (true, _, _) => Packages::OptOut(exclude), - }) - } - - /// Converts selected packages from a workspace to `PackageIdSpec`s. 
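`Packages::from_flags` above boils down to one match on the three command-line inputs. The same table, rendered standalone with plain types (a `String` error instead of `CargoResult`, and an illustrative `Selection` enum):

/// How the user selected packages on the command line.
#[derive(Debug, PartialEq)]
enum Selection {
    Default,
    All,
    OptOut(Vec<String>),
    Packages(Vec<String>),
}

/// Combine `--workspace`, `--exclude`, and `-p`; `--exclude` is only
/// accepted together with `--workspace`.
fn from_flags(all: bool, exclude: Vec<String>, package: Vec<String>) -> Result<Selection, String> {
    Ok(match (all, exclude.len(), package.len()) {
        (false, 0, 0) => Selection::Default,
        (false, 0, _) => Selection::Packages(package),
        (false, _, _) => return Err("--exclude can only be used together with --workspace".into()),
        (true, 0, _) => Selection::All,
        (true, _, _) => Selection::OptOut(exclude),
    })
}

fn main() {
    assert_eq!(from_flags(false, vec![], vec![]), Ok(Selection::Default));
    assert_eq!(
        from_flags(true, vec!["big-crate".into()], vec![]),
        Ok(Selection::OptOut(vec!["big-crate".into()]))
    );
    assert!(from_flags(false, vec!["x".into()], vec![]).is_err());
}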
- pub fn to_package_id_specs(&self, ws: &Workspace<'_>) -> CargoResult> { - let specs = match self { - Packages::All => ws - .members() - .map(Package::package_id) - .map(PackageIdSpec::from_package_id) - .collect(), - Packages::OptOut(opt_out) => { - let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?; - let specs = ws - .members() - .filter(|pkg| { - !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns) - }) - .map(Package::package_id) - .map(PackageIdSpec::from_package_id) - .collect(); - let warn = |e| ws.config().shell().warn(e); - emit_package_not_found(ws, names, true).or_else(warn)?; - emit_pattern_not_found(ws, patterns, true).or_else(warn)?; - specs - } - Packages::Packages(packages) if packages.is_empty() => { - vec![PackageIdSpec::from_package_id(ws.current()?.package_id())] - } - Packages::Packages(opt_in) => { - let (mut patterns, packages) = opt_patterns_and_names(opt_in)?; - let mut specs = packages - .iter() - .map(|p| PackageIdSpec::parse(p)) - .collect::>>()?; - if !patterns.is_empty() { - let matched_pkgs = ws - .members() - .filter(|pkg| match_patterns(pkg, &mut patterns)) - .map(Package::package_id) - .map(PackageIdSpec::from_package_id); - specs.extend(matched_pkgs); - } - emit_pattern_not_found(ws, patterns, false)?; - specs - } - Packages::Default => ws - .default_members() - .map(Package::package_id) - .map(PackageIdSpec::from_package_id) - .collect(), - }; - if specs.is_empty() { - if ws.is_virtual() { - anyhow::bail!( - "manifest path `{}` contains no package: The manifest is virtual, \ - and the workspace has no members.", - ws.root().display() - ) - } - anyhow::bail!("no packages to compile") - } - Ok(specs) - } - - /// Gets a list of selected packages from a workspace. - pub fn get_packages<'ws>(&self, ws: &'ws Workspace<'_>) -> CargoResult> { - let packages: Vec<_> = match self { - Packages::Default => ws.default_members().collect(), - Packages::All => ws.members().collect(), - Packages::OptOut(opt_out) => { - let (mut patterns, mut names) = opt_patterns_and_names(opt_out)?; - let packages = ws - .members() - .filter(|pkg| { - !names.remove(pkg.name().as_str()) && !match_patterns(pkg, &mut patterns) - }) - .collect(); - emit_package_not_found(ws, names, true)?; - emit_pattern_not_found(ws, patterns, true)?; - packages - } - Packages::Packages(opt_in) => { - let (mut patterns, mut names) = opt_patterns_and_names(opt_in)?; - let packages = ws - .members() - .filter(|pkg| { - names.remove(pkg.name().as_str()) || match_patterns(pkg, &mut patterns) - }) - .collect(); - emit_package_not_found(ws, names, false)?; - emit_pattern_not_found(ws, patterns, false)?; - packages - } - }; - Ok(packages) - } - - /// Returns whether or not the user needs to pass a `-p` flag to target a - /// specific package in the workspace. - pub fn needs_spec_flag(&self, ws: &Workspace<'_>) -> bool { - match self { - Packages::Default => ws.default_members().count() > 1, - Packages::All => ws.members().count() > 1, - Packages::Packages(_) => true, - Packages::OptOut(_) => true, - } - } -} - -#[derive(Debug, PartialEq, Eq)] -pub enum LibRule { - /// Include the library, fail if not present - True, - /// Include the library if present - Default, - /// Exclude the library - False, -} - -#[derive(Debug)] -pub enum FilterRule { - All, - Just(Vec), -} - -#[derive(Debug)] -pub enum CompileFilter { - Default { - /// Flag whether targets can be safely skipped when required-features are not satisfied. 
- required_features_filterable: bool, - }, - Only { - all_targets: bool, - lib: LibRule, - bins: FilterRule, - examples: FilterRule, - tests: FilterRule, - benches: FilterRule, - }, -} - -pub fn compile<'a>(ws: &Workspace<'a>, options: &CompileOptions) -> CargoResult> { - let exec: Arc = Arc::new(DefaultExecutor); - compile_with_exec(ws, options, &exec) -} - -/// Like `compile` but allows specifying a custom `Executor` that will be able to intercept build -/// calls and add custom logic. `compile` uses `DefaultExecutor` which just passes calls through. -pub fn compile_with_exec<'a>( - ws: &Workspace<'a>, - options: &CompileOptions, - exec: &Arc, -) -> CargoResult> { - ws.emit_warnings()?; - compile_ws(ws, options, exec) -} - -pub fn compile_ws<'a>( - ws: &Workspace<'a>, - options: &CompileOptions, - exec: &Arc, -) -> CargoResult> { - let interner = UnitInterner::new(); - let bcx = create_bcx(ws, options, &interner)?; - if options.build_config.unit_graph { - unit_graph::emit_serialized_unit_graph(&bcx.roots, &bcx.unit_graph, ws.config())?; - return Compilation::new(&bcx); - } - let _p = profile::start("compiling"); - let cx = Context::new(&bcx)?; - cx.compile(exec) -} - -pub fn print<'a>( - ws: &Workspace<'a>, - options: &CompileOptions, - print_opt_value: &str, -) -> CargoResult<()> { - let CompileOptions { - ref build_config, - ref target_rustc_args, - .. - } = *options; - let config = ws.config(); - let rustc = config.load_global_rustc(Some(ws))?; - for (index, kind) in build_config.requested_kinds.iter().enumerate() { - if index != 0 { - drop_println!(config); - } - let target_info = TargetInfo::new(config, &build_config.requested_kinds, &rustc, *kind)?; - let mut process = rustc.process(); - process.args(&target_info.rustflags); - if let Some(args) = target_rustc_args { - process.args(args); - } - if let CompileKind::Target(t) = kind { - process.arg("--target").arg(t.short_name()); - } - process.arg("--print").arg(print_opt_value); - process.exec()?; - } - Ok(()) -} - -pub fn create_bcx<'a, 'cfg>( - ws: &'a Workspace<'cfg>, - options: &'a CompileOptions, - interner: &'a UnitInterner, -) -> CargoResult> { - let CompileOptions { - ref build_config, - ref spec, - ref cli_features, - ref filter, - ref target_rustdoc_args, - ref target_rustc_args, - ref target_rustc_crate_types, - ref local_rustdoc_args, - rustdoc_document_private_items, - honor_rust_version, - } = *options; - let config = ws.config(); - - // Perform some pre-flight validation. - match build_config.mode { - CompileMode::Test - | CompileMode::Build - | CompileMode::Check { .. } - | CompileMode::Bench - | CompileMode::RunCustomBuild => { - if std::env::var("RUST_FLAGS").is_ok() { - config.shell().warn( - "Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?", - )?; - } - } - CompileMode::Doc { .. } | CompileMode::Doctest | CompileMode::Docscrape => { - if std::env::var("RUSTDOC_FLAGS").is_ok() { - config.shell().warn( - "Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?" 
- )?; - } - } - } - config.validate_term_config()?; - - let target_data = RustcTargetData::new(ws, &build_config.requested_kinds)?; - - let all_packages = &Packages::All; - let rustdoc_scrape_examples = &config.cli_unstable().rustdoc_scrape_examples; - let need_reverse_dependencies = rustdoc_scrape_examples.is_some(); - let full_specs = if need_reverse_dependencies { - all_packages - } else { - spec - }; - - let resolve_specs = full_specs.to_package_id_specs(ws)?; - let has_dev_units = if filter.need_dev_deps(build_config.mode) || need_reverse_dependencies { - HasDevUnits::Yes - } else { - HasDevUnits::No - }; - let resolve = ops::resolve_ws_with_opts( - ws, - &target_data, - &build_config.requested_kinds, - cli_features, - &resolve_specs, - has_dev_units, - crate::core::resolver::features::ForceAllTargets::No, - )?; - let WorkspaceResolve { - mut pkg_set, - workspace_resolve, - targeted_resolve: resolve, - resolved_features, - } = resolve; - - let std_resolve_features = if let Some(crates) = &config.cli_unstable().build_std { - if build_config.build_plan { - config - .shell() - .warn("-Zbuild-std does not currently fully support --build-plan")?; - } - if build_config.requested_kinds[0].is_host() { - // TODO: This should eventually be fixed. Unfortunately it is not - // easy to get the host triple in BuildConfig. Consider changing - // requested_target to an enum, or some other approach. - anyhow::bail!("-Zbuild-std requires --target"); - } - let (std_package_set, std_resolve, std_features) = - standard_lib::resolve_std(ws, &target_data, &build_config.requested_kinds, crates)?; - pkg_set.add_set(std_package_set); - Some((std_resolve, std_features)) - } else { - None - }; - - // Find the packages in the resolver that the user wants to build (those - // passed in with `-p` or the defaults from the workspace), and convert - // Vec to a Vec. - let specs = if need_reverse_dependencies { - spec.to_package_id_specs(ws)? - } else { - resolve_specs.clone() - }; - let to_build_ids = resolve.specs_to_ids(&specs)?; - // Now get the `Package` for each `PackageId`. This may trigger a download - // if the user specified `-p` for a dependency that is not downloaded. - // Dependencies will be downloaded during build_unit_dependencies. - let mut to_builds = pkg_set.get_many(to_build_ids)?; - - // The ordering here affects some error messages coming out of cargo, so - // let's be test and CLI friendly by always printing in the same order if - // there's an error. 
- to_builds.sort_by_key(|p| p.package_id()); - - for pkg in to_builds.iter() { - pkg.manifest().print_teapot(config); - - if build_config.mode.is_any_test() - && !ws.is_member(pkg) - && pkg.dependencies().iter().any(|dep| !dep.is_transitive()) - { - anyhow::bail!( - "package `{}` cannot be tested because it requires dev-dependencies \ - and is not a member of the workspace", - pkg.name() - ); - } - } - - let (extra_args, extra_args_name) = match (target_rustc_args, target_rustdoc_args) { - (&Some(ref args), _) => (Some(args.clone()), "rustc"), - (_, &Some(ref args)) => (Some(args.clone()), "rustdoc"), - _ => (None, ""), - }; - - if extra_args.is_some() && to_builds.len() != 1 { - panic!( - "`{}` should not accept multiple `-p` flags", - extra_args_name - ); - } - - let profiles = Profiles::new(ws, build_config.requested_profile)?; - profiles.validate_packages( - ws.profiles(), - &mut config.shell(), - workspace_resolve.as_ref().unwrap_or(&resolve), - )?; - - // If `--target` has not been specified, then the unit graph is built - // assuming `--target $HOST` was specified. See - // `rebuild_unit_graph_shared` for more on why this is done. - let explicit_host_kind = CompileKind::Target(CompileTarget::new(&target_data.rustc.host)?); - let explicit_host_kinds: Vec<_> = build_config - .requested_kinds - .iter() - .map(|kind| match kind { - CompileKind::Host => explicit_host_kind, - CompileKind::Target(t) => CompileKind::Target(*t), - }) - .collect(); - - // Passing `build_config.requested_kinds` instead of - // `explicit_host_kinds` here so that `generate_targets` can do - // its own special handling of `CompileKind::Host`. It will - // internally replace the host kind by the `explicit_host_kind` - // before setting as a unit. - let mut units = generate_targets( - ws, - &to_builds, - filter, - &build_config.requested_kinds, - explicit_host_kind, - build_config.mode, - &resolve, - &workspace_resolve, - &resolved_features, - &pkg_set, - &profiles, - interner, - )?; - - let mut scrape_units = match rustdoc_scrape_examples { - Some(arg) => { - let filter = match arg.as_str() { - "all" => CompileFilter::new_all_targets(), - "examples" => CompileFilter::new( - LibRule::False, - FilterRule::none(), - FilterRule::none(), - FilterRule::All, - FilterRule::none(), - ), - _ => { - bail!( - r#"-Z rustdoc-scrape-examples must take "all" or "examples" as an argument"# - ) - } - }; - let to_build_ids = resolve.specs_to_ids(&resolve_specs)?; - let to_builds = pkg_set.get_many(to_build_ids)?; - let mode = CompileMode::Docscrape; - - generate_targets( - ws, - &to_builds, - &filter, - &build_config.requested_kinds, - explicit_host_kind, - mode, - &resolve, - &workspace_resolve, - &resolved_features, - &pkg_set, - &profiles, - interner, - )? - } - None => Vec::new(), - }; - - let std_roots = if let Some(crates) = &config.cli_unstable().build_std { - // Only build libtest if it looks like it is needed. - let mut crates = crates.clone(); - if !crates.iter().any(|c| c == "test") - && units - .iter() - .any(|unit| unit.mode.is_rustc_test() && unit.target.harness()) - { - // Only build libtest when libstd is built (libtest depends on libstd) - if crates.iter().any(|c| c == "std") { - crates.push("test".to_string()); - } - } - let (std_resolve, std_features) = std_resolve_features.as_ref().unwrap(); - standard_lib::generate_std_roots( - &crates, - std_resolve, - std_features, - &explicit_host_kinds, - &pkg_set, - interner, - &profiles, - )? 
- } else { - Default::default() - }; - - let mut unit_graph = build_unit_dependencies( - ws, - &pkg_set, - &resolve, - &resolved_features, - std_resolve_features.as_ref(), - &units, - &scrape_units, - &std_roots, - build_config.mode, - &target_data, - &profiles, - interner, - )?; - - // TODO: In theory, Cargo should also dedupe the roots, but I'm uncertain - // what heuristics to use in that case. - if build_config.mode == (CompileMode::Doc { deps: true }) { - remove_duplicate_doc(build_config, &units, &mut unit_graph); - } - - if build_config - .requested_kinds - .iter() - .any(CompileKind::is_host) - { - // Rebuild the unit graph, replacing the explicit host targets with - // CompileKind::Host, merging any dependencies shared with build - // dependencies. - let new_graph = rebuild_unit_graph_shared( - interner, - unit_graph, - &units, - &scrape_units, - explicit_host_kind, - ); - // This would be nicer with destructuring assignment. - units = new_graph.0; - scrape_units = new_graph.1; - unit_graph = new_graph.2; - } - - let mut extra_compiler_args = HashMap::new(); - if let Some(args) = extra_args { - if units.len() != 1 { - anyhow::bail!( - "extra arguments to `{}` can only be passed to one \ - target, consider filtering\nthe package by passing, \ - e.g., `--lib` or `--bin NAME` to specify a single target", - extra_args_name - ); - } - extra_compiler_args.insert(units[0].clone(), args); - } - - for unit in &units { - if unit.mode.is_doc() || unit.mode.is_doc_test() { - let mut extra_args = local_rustdoc_args.clone(); - - // Add `--document-private-items` rustdoc flag if requested or if - // the target is a binary. Binary crates get their private items - // documented by default. - if rustdoc_document_private_items || unit.target.is_bin() { - let mut args = extra_args.take().unwrap_or_default(); - args.push("--document-private-items".into()); - extra_args = Some(args); - } - - if let Some(args) = extra_args { - extra_compiler_args - .entry(unit.clone()) - .or_default() - .extend(args); - } - } - } - - let mut crate_types = HashMap::new(); - if let Some(args) = target_rustc_crate_types { - if units.len() != 1 { - anyhow::bail!( - "crate types to rustc can only be passed to one \ - target, consider filtering\nthe package by passing, \ - e.g., `--lib` or `--example` to specify a single target" - ); - } - match units[0].target.kind() { - TargetKind::Lib(_) | TargetKind::ExampleLib(_) => { - crate_types.insert(units[0].clone(), args.clone()); - } - _ => { - anyhow::bail!( - "crate types can only be specified for libraries and example libraries.\n\ - Binaries, tests, and benchmarks are always the `bin` crate type" - ); - } - } - } - - if honor_rust_version { - // Remove any pre-release identifiers for easier comparison - let current_version = &target_data.rustc.version; - let untagged_version = semver::Version::new( - current_version.major, - current_version.minor, - current_version.patch, - ); - - for unit in unit_graph.keys() { - let version = match unit.pkg.rust_version() { - Some(v) => v, - None => continue, - }; - - let req = semver::VersionReq::parse(version).unwrap(); - if req.matches(&untagged_version) { - continue; - } - - anyhow::bail!( - "package `{}` cannot be built because it requires rustc {} or newer, \ - while the currently active rustc version is {}", - unit.pkg, - version, - current_version, - ); - } - } - - let bcx = BuildContext::new( - ws, - pkg_set, - build_config, - profiles, - extra_compiler_args, - crate_types, - target_data, - units, - unit_graph, - scrape_units, - 
)?; - - Ok(bcx) -} - -impl FilterRule { - pub fn new(targets: Vec, all: bool) -> FilterRule { - if all { - FilterRule::All - } else { - FilterRule::Just(targets) - } - } - - pub fn none() -> FilterRule { - FilterRule::Just(Vec::new()) - } - - fn matches(&self, target: &Target) -> bool { - match *self { - FilterRule::All => true, - FilterRule::Just(ref targets) => targets.iter().any(|x| *x == target.name()), - } - } - - fn is_specific(&self) -> bool { - match *self { - FilterRule::All => true, - FilterRule::Just(ref targets) => !targets.is_empty(), - } - } - - pub fn try_collect(&self) -> Option> { - match *self { - FilterRule::All => None, - FilterRule::Just(ref targets) => Some(targets.clone()), - } - } - - pub(crate) fn contains_glob_patterns(&self) -> bool { - match self { - FilterRule::All => false, - FilterRule::Just(targets) => targets.iter().any(is_glob_pattern), - } - } -} - -impl CompileFilter { - /// Construct a CompileFilter from raw command line arguments. - pub fn from_raw_arguments( - lib_only: bool, - bins: Vec, - all_bins: bool, - tsts: Vec, - all_tsts: bool, - exms: Vec, - all_exms: bool, - bens: Vec, - all_bens: bool, - all_targets: bool, - ) -> CompileFilter { - if all_targets { - return CompileFilter::new_all_targets(); - } - let rule_lib = if lib_only { - LibRule::True - } else { - LibRule::False - }; - let rule_bins = FilterRule::new(bins, all_bins); - let rule_tsts = FilterRule::new(tsts, all_tsts); - let rule_exms = FilterRule::new(exms, all_exms); - let rule_bens = FilterRule::new(bens, all_bens); - - CompileFilter::new(rule_lib, rule_bins, rule_tsts, rule_exms, rule_bens) - } - - /// Construct a CompileFilter from underlying primitives. - pub fn new( - rule_lib: LibRule, - rule_bins: FilterRule, - rule_tsts: FilterRule, - rule_exms: FilterRule, - rule_bens: FilterRule, - ) -> CompileFilter { - if rule_lib == LibRule::True - || rule_bins.is_specific() - || rule_tsts.is_specific() - || rule_exms.is_specific() - || rule_bens.is_specific() - { - CompileFilter::Only { - all_targets: false, - lib: rule_lib, - bins: rule_bins, - examples: rule_exms, - benches: rule_bens, - tests: rule_tsts, - } - } else { - CompileFilter::Default { - required_features_filterable: true, - } - } - } - - pub fn new_all_targets() -> CompileFilter { - CompileFilter::Only { - all_targets: true, - lib: LibRule::Default, - bins: FilterRule::All, - examples: FilterRule::All, - benches: FilterRule::All, - tests: FilterRule::All, - } - } - - pub fn need_dev_deps(&self, mode: CompileMode) -> bool { - match mode { - CompileMode::Test | CompileMode::Doctest | CompileMode::Bench => true, - CompileMode::Check { test: true } => true, - CompileMode::Build - | CompileMode::Doc { .. } - | CompileMode::Docscrape - | CompileMode::Check { test: false } => match *self { - CompileFilter::Default { .. } => false, - CompileFilter::Only { - ref examples, - ref tests, - ref benches, - .. - } => examples.is_specific() || tests.is_specific() || benches.is_specific(), - }, - CompileMode::RunCustomBuild => panic!("Invalid mode"), - } - } - - // this selects targets for "cargo run". for logic to select targets for - // other subcommands, see generate_targets and filter_default_targets - pub fn target_run(&self, target: &Target) -> bool { - match *self { - CompileFilter::Default { .. } => true, - CompileFilter::Only { - ref lib, - ref bins, - ref examples, - ref tests, - ref benches, - .. 
- } => { - let rule = match *target.kind() { - TargetKind::Bin => bins, - TargetKind::Test => tests, - TargetKind::Bench => benches, - TargetKind::ExampleBin | TargetKind::ExampleLib(..) => examples, - TargetKind::Lib(..) => { - return match *lib { - LibRule::True => true, - LibRule::Default => true, - LibRule::False => false, - }; - } - TargetKind::CustomBuild => return false, - }; - rule.matches(target) - } - } - } - - pub fn is_specific(&self) -> bool { - match *self { - CompileFilter::Default { .. } => false, - CompileFilter::Only { .. } => true, - } - } - - pub fn is_all_targets(&self) -> bool { - matches!( - *self, - CompileFilter::Only { - all_targets: true, - .. - } - ) - } - - pub(crate) fn contains_glob_patterns(&self) -> bool { - match self { - CompileFilter::Default { .. } => false, - CompileFilter::Only { - bins, - examples, - tests, - benches, - .. - } => { - bins.contains_glob_patterns() - || examples.contains_glob_patterns() - || tests.contains_glob_patterns() - || benches.contains_glob_patterns() - } - } - } -} - -/// A proposed target. -/// -/// Proposed targets are later filtered into actual `Unit`s based on whether or -/// not the target requires its features to be present. -#[derive(Debug)] -struct Proposal<'a> { - pkg: &'a Package, - target: &'a Target, - /// Indicates whether or not all required features *must* be present. If - /// false, and the features are not available, then it will be silently - /// skipped. Generally, targets specified by name (`--bin foo`) are - /// required, all others can be silently skipped if features are missing. - requires_features: bool, - mode: CompileMode, -} - -/// Generates all the base targets for the packages the user has requested to -/// compile. Dependencies for these targets are computed later in `unit_dependencies`. -fn generate_targets( - ws: &Workspace<'_>, - packages: &[&Package], - filter: &CompileFilter, - requested_kinds: &[CompileKind], - explicit_host_kind: CompileKind, - mode: CompileMode, - resolve: &Resolve, - workspace_resolve: &Option, - resolved_features: &features::ResolvedFeatures, - package_set: &PackageSet<'_>, - profiles: &Profiles, - interner: &UnitInterner, -) -> CargoResult> { - let config = ws.config(); - // Helper for creating a list of `Unit` structures - let new_unit = - |units: &mut HashSet, pkg: &Package, target: &Target, target_mode: CompileMode| { - let unit_for = if target_mode.is_any_test() { - // NOTE: the `UnitFor` here is subtle. If you have a profile - // with `panic` set, the `panic` flag is cleared for - // tests/benchmarks and their dependencies. If this - // was `normal`, then the lib would get compiled three - // times (once with panic, once without, and once with - // `--test`). - // - // This would cause a problem for doc tests, which would fail - // because `rustdoc` would attempt to link with both libraries - // at the same time. Also, it's probably not important (or - // even desirable?) for rustdoc to link with a lib with - // `panic` set. - // - // As a consequence, Examples and Binaries get compiled - // without `panic` set. This probably isn't a bad deal. - // - // Forcing the lib to be compiled three times during `cargo - // test` is probably also not desirable. - UnitFor::new_test(config) - } else if target.for_host() { - // Proc macro / plugin should not have `panic` set. - UnitFor::new_compiler() - } else { - UnitFor::new_normal() - }; - // Custom build units are added in `build_unit_dependencies`. 
- assert!(!target.is_custom_build()); - let target_mode = match target_mode { - CompileMode::Test => { - if target.is_example() && !filter.is_specific() && !target.tested() { - // Examples are included as regular binaries to verify - // that they compile. - CompileMode::Build - } else { - CompileMode::Test - } - } - CompileMode::Build => match *target.kind() { - TargetKind::Test => CompileMode::Test, - TargetKind::Bench => CompileMode::Bench, - _ => CompileMode::Build, - }, - // `CompileMode::Bench` is only used to inform `filter_default_targets` - // which command is being used (`cargo bench`). Afterwards, tests - // and benches are treated identically. Switching the mode allows - // de-duplication of units that are essentially identical. For - // example, `cargo build --all-targets --release` creates the units - // (lib profile:bench, mode:test) and (lib profile:bench, mode:bench) - // and since these are the same, we want them to be de-duplicated in - // `unit_dependencies`. - CompileMode::Bench => CompileMode::Test, - _ => target_mode, - }; - - let is_local = pkg.package_id().source_id().is_path(); - - // No need to worry about build-dependencies, roots are never build dependencies. - let features_for = FeaturesFor::from_for_host(target.proc_macro()); - let features = resolved_features.activated_features(pkg.package_id(), features_for); - - // If `--target` has not been specified, then the unit - // graph is built almost like if `--target $HOST` was - // specified. See `rebuild_unit_graph_shared` for more on - // why this is done. However, if the package has its own - // `package.target` key, then this gets used instead of - // `$HOST` - let explicit_kinds = if let Some(k) = pkg.manifest().forced_kind() { - vec![k] - } else { - requested_kinds - .iter() - .map(|kind| match kind { - CompileKind::Host => { - pkg.manifest().default_kind().unwrap_or(explicit_host_kind) - } - CompileKind::Target(t) => CompileKind::Target(*t), - }) - .collect() - }; - - for kind in explicit_kinds.iter() { - let profile = profiles.get_profile( - pkg.package_id(), - ws.is_member(pkg), - is_local, - unit_for, - target_mode, - *kind, - ); - let unit = interner.intern( - pkg, - target, - profile, - kind.for_target(target), - target_mode, - features.clone(), - /*is_std*/ false, - /*dep_hash*/ 0, - ); - units.insert(unit); - } - }; - - // Create a list of proposed targets. - let mut proposals: Vec> = Vec::new(); - - match *filter { - CompileFilter::Default { - required_features_filterable, - } => { - for pkg in packages { - let default = filter_default_targets(pkg.targets(), mode); - proposals.extend(default.into_iter().map(|target| Proposal { - pkg, - target, - requires_features: !required_features_filterable, - mode, - })); - if mode == CompileMode::Test { - if let Some(t) = pkg - .targets() - .iter() - .find(|t| t.is_lib() && t.doctested() && t.doctestable()) - { - proposals.push(Proposal { - pkg, - target: t, - requires_features: false, - mode: CompileMode::Doctest, - }); - } - } - } - } - CompileFilter::Only { - all_targets, - ref lib, - ref bins, - ref examples, - ref tests, - ref benches, - } => { - if *lib != LibRule::False { - let mut libs = Vec::new(); - for proposal in filter_targets(packages, Target::is_lib, false, mode) { - let Proposal { target, pkg, .. 
} = proposal; - if mode.is_doc_test() && !target.doctestable() { - let types = target.rustc_crate_types(); - let types_str: Vec<&str> = types.iter().map(|t| t.as_str()).collect(); - ws.config().shell().warn(format!( - "doc tests are not supported for crate type(s) `{}` in package `{}`", - types_str.join(", "), - pkg.name() - ))?; - } else { - libs.push(proposal) - } - } - if !all_targets && libs.is_empty() && *lib == LibRule::True { - let names = packages.iter().map(|pkg| pkg.name()).collect::>(); - if names.len() == 1 { - anyhow::bail!("no library targets found in package `{}`", names[0]); - } else { - anyhow::bail!("no library targets found in packages: {}", names.join(", ")); - } - } - proposals.extend(libs); - } - - // If `--tests` was specified, add all targets that would be - // generated by `cargo test`. - let test_filter = match tests { - FilterRule::All => Target::tested, - FilterRule::Just(_) => Target::is_test, - }; - let test_mode = match mode { - CompileMode::Build => CompileMode::Test, - CompileMode::Check { .. } => CompileMode::Check { test: true }, - _ => mode, - }; - // If `--benches` was specified, add all targets that would be - // generated by `cargo bench`. - let bench_filter = match benches { - FilterRule::All => Target::benched, - FilterRule::Just(_) => Target::is_bench, - }; - let bench_mode = match mode { - CompileMode::Build => CompileMode::Bench, - CompileMode::Check { .. } => CompileMode::Check { test: true }, - _ => mode, - }; - - proposals.extend(list_rule_targets( - packages, - bins, - "bin", - Target::is_bin, - mode, - )?); - proposals.extend(list_rule_targets( - packages, - examples, - "example", - Target::is_example, - mode, - )?); - proposals.extend(list_rule_targets( - packages, - tests, - "test", - test_filter, - test_mode, - )?); - proposals.extend(list_rule_targets( - packages, - benches, - "bench", - bench_filter, - bench_mode, - )?); - } - } - - // Only include targets that are libraries or have all required - // features available. - // - // `features_map` is a map of &Package -> enabled_features - // It is computed by the set of enabled features for the package plus - // every enabled feature of every enabled dependency. - let mut features_map = HashMap::new(); - // This needs to be a set to de-duplicate units. Due to the way the - // targets are filtered, it is possible to have duplicate proposals for - // the same thing. 
- let mut units = HashSet::new(); - for Proposal { - pkg, - target, - requires_features, - mode, - } in proposals - { - let unavailable_features = match target.required_features() { - Some(rf) => { - validate_required_features( - workspace_resolve, - target.name(), - rf, - pkg.summary(), - &mut config.shell(), - )?; - - let features = features_map.entry(pkg).or_insert_with(|| { - resolve_all_features(resolve, resolved_features, package_set, pkg.package_id()) - }); - rf.iter().filter(|f| !features.contains(*f)).collect() - } - None => Vec::new(), - }; - if target.is_lib() || unavailable_features.is_empty() { - new_unit(&mut units, pkg, target, mode); - } else if requires_features { - let required_features = target.required_features().unwrap(); - let quoted_required_features: Vec = required_features - .iter() - .map(|s| format!("`{}`", s)) - .collect(); - anyhow::bail!( - "target `{}` in package `{}` requires the features: {}\n\ - Consider enabling them by passing, e.g., `--features=\"{}\"`", - target.name(), - pkg.name(), - quoted_required_features.join(", "), - required_features.join(" ") - ); - } - // else, silently skip target. - } - let mut units: Vec<_> = units.into_iter().collect(); - unmatched_target_filters(&units, filter, &mut ws.config().shell())?; - - // Keep the roots in a consistent order, which helps with checking test output. - units.sort_unstable(); - Ok(units) -} - -/// Checks if the unit list is empty and the user has passed any combination of -/// --tests, --examples, --benches or --bins, and we didn't match on any targets. -/// We want to emit a warning to make sure the user knows that this run is a no-op, -/// and their code remains unchecked despite cargo not returning any errors -fn unmatched_target_filters( - units: &[Unit], - filter: &CompileFilter, - shell: &mut Shell, -) -> CargoResult<()> { - if let CompileFilter::Only { - all_targets, - lib: _, - ref bins, - ref examples, - ref tests, - ref benches, - } = *filter - { - if units.is_empty() { - let mut filters = String::new(); - let mut miss_count = 0; - - let mut append = |t: &FilterRule, s| { - if let FilterRule::All = *t { - miss_count += 1; - filters.push_str(s); - } - }; - - if all_targets { - filters.push_str(" `all-targets`"); - } else { - append(bins, " `bins`,"); - append(tests, " `tests`,"); - append(examples, " `examples`,"); - append(benches, " `benches`,"); - filters.pop(); - } - - return shell.warn(format!( - "Target {}{} specified, but no targets matched. This is a no-op", - if miss_count > 1 { "filters" } else { "filter" }, - filters, - )); - } - } - - Ok(()) -} - -/// Warns if a target's required-features references a feature that doesn't exist. -/// -/// This is a warning because historically this was not validated, and it -/// would cause too much breakage to make it an error. -fn validate_required_features( - resolve: &Option, - target_name: &str, - required_features: &[String], - summary: &Summary, - shell: &mut Shell, -) -> CargoResult<()> { - let resolve = match resolve { - None => return Ok(()), - Some(resolve) => resolve, - }; - - for feature in required_features { - let fv = FeatureValue::new(feature.into()); - match &fv { - FeatureValue::Feature(f) => { - if !summary.features().contains_key(f) { - shell.warn(format!( - "invalid feature `{}` in required-features of target `{}`: \ - `{}` is not present in [features] section", - fv, target_name, fv - ))?; - } - } - FeatureValue::Dep { .. 
} => { - anyhow::bail!( - "invalid feature `{}` in required-features of target `{}`: \ - `dep:` prefixed feature values are not allowed in required-features", - fv, - target_name - ); - } - FeatureValue::DepFeature { weak: true, .. } => { - anyhow::bail!( - "invalid feature `{}` in required-features of target `{}`: \ - optional dependency with `?` is not allowed in required-features", - fv, - target_name - ); - } - // Handling of dependent_crate/dependent_crate_feature syntax - FeatureValue::DepFeature { - dep_name, - dep_feature, - weak: false, - } => { - match resolve - .deps(summary.package_id()) - .find(|(_dep_id, deps)| deps.iter().any(|dep| dep.name_in_toml() == *dep_name)) - { - Some((dep_id, _deps)) => { - let dep_summary = resolve.summary(dep_id); - if !dep_summary.features().contains_key(dep_feature) - && !dep_summary - .dependencies() - .iter() - .any(|dep| dep.name_in_toml() == *dep_feature && dep.is_optional()) - { - shell.warn(format!( - "invalid feature `{}` in required-features of target `{}`: \ - feature `{}` does not exist in package `{}`", - fv, target_name, dep_feature, dep_id - ))?; - } - } - None => { - shell.warn(format!( - "invalid feature `{}` in required-features of target `{}`: \ - dependency `{}` does not exist", - fv, target_name, dep_name - ))?; - } - } - } - } - } - Ok(()) -} - -/// Gets all of the features enabled for a package, plus its dependencies' -/// features. -/// -/// Dependencies are added as `dep_name/feat_name` because `required-features` -/// wants to support that syntax. -pub fn resolve_all_features( - resolve_with_overrides: &Resolve, - resolved_features: &features::ResolvedFeatures, - package_set: &PackageSet<'_>, - package_id: PackageId, -) -> HashSet { - let mut features: HashSet = resolved_features - .activated_features(package_id, FeaturesFor::NormalOrDev) - .iter() - .map(|s| s.to_string()) - .collect(); - - // Include features enabled for use by dependencies so targets can also use them with the - // required-features field when deciding whether to be built or skipped. - for (dep_id, deps) in resolve_with_overrides.deps(package_id) { - let is_proc_macro = package_set - .get_one(dep_id) - .expect("packages downloaded") - .proc_macro(); - for dep in deps { - let features_for = FeaturesFor::from_for_host(is_proc_macro || dep.is_build()); - for feature in resolved_features - .activated_features_unverified(dep_id, features_for) - .unwrap_or_default() - { - features.insert(format!("{}/{}", dep.name_in_toml(), feature)); - } - } - } - - features -} - -/// Given a list of all targets for a package, filters out only the targets -/// that are automatically included when the user doesn't specify any targets. -fn filter_default_targets(targets: &[Target], mode: CompileMode) -> Vec<&Target> { - match mode { - CompileMode::Bench => targets.iter().filter(|t| t.benched()).collect(), - CompileMode::Test => targets - .iter() - .filter(|t| t.tested() || t.is_example()) - .collect(), - CompileMode::Build | CompileMode::Check { .. } => targets - .iter() - .filter(|t| t.is_bin() || t.is_lib()) - .collect(), - CompileMode::Doc { .. } => { - // `doc` does lib and bins (bin with same name as lib is skipped). 
- targets - .iter() - .filter(|t| { - t.documented() - && (!t.is_bin() - || !targets.iter().any(|l| l.is_lib() && l.name() == t.name())) - }) - .collect() - } - CompileMode::Doctest | CompileMode::Docscrape | CompileMode::RunCustomBuild => { - panic!("Invalid mode {:?}", mode) - } - } -} - -/// Returns a list of proposed targets based on command-line target selection flags. -fn list_rule_targets<'a>( - packages: &[&'a Package], - rule: &FilterRule, - target_desc: &'static str, - is_expected_kind: fn(&Target) -> bool, - mode: CompileMode, -) -> CargoResult>> { - let mut proposals = Vec::new(); - match rule { - FilterRule::All => { - proposals.extend(filter_targets(packages, is_expected_kind, false, mode)) - } - FilterRule::Just(names) => { - for name in names { - proposals.extend(find_named_targets( - packages, - name, - target_desc, - is_expected_kind, - mode, - )?); - } - } - } - Ok(proposals) -} - -/// Finds the targets for a specifically named target. -fn find_named_targets<'a>( - packages: &[&'a Package], - target_name: &str, - target_desc: &'static str, - is_expected_kind: fn(&Target) -> bool, - mode: CompileMode, -) -> CargoResult>> { - let is_glob = is_glob_pattern(target_name); - let proposals = if is_glob { - let pattern = build_glob(target_name)?; - let filter = |t: &Target| is_expected_kind(t) && pattern.matches(t.name()); - filter_targets(packages, filter, true, mode) - } else { - let filter = |t: &Target| t.name() == target_name && is_expected_kind(t); - filter_targets(packages, filter, true, mode) - }; - - if proposals.is_empty() { - let targets = packages.iter().flat_map(|pkg| { - pkg.targets() - .iter() - .filter(|target| is_expected_kind(target)) - }); - let suggestion = closest_msg(target_name, targets, |t| t.name()); - anyhow::bail!( - "no {} target {} `{}`{}", - target_desc, - if is_glob { "matches pattern" } else { "named" }, - target_name, - suggestion - ); - } - Ok(proposals) -} - -fn filter_targets<'a>( - packages: &[&'a Package], - predicate: impl Fn(&Target) -> bool, - requires_features: bool, - mode: CompileMode, -) -> Vec> { - let mut proposals = Vec::new(); - for pkg in packages { - for target in pkg.targets().iter().filter(|t| predicate(t)) { - proposals.push(Proposal { - pkg, - target, - requires_features, - mode, - }); - } - } - proposals -} - -/// This is used to rebuild the unit graph, sharing host dependencies if possible. -/// -/// This will translate any unit's `CompileKind::Target(host)` to -/// `CompileKind::Host` if the kind is equal to `to_host`. This also handles -/// generating the unit `dep_hash`, and merging shared units if possible. -/// -/// This is necessary because if normal dependencies used `CompileKind::Host`, -/// there would be no way to distinguish those units from build-dependency -/// units. This can cause a problem if a shared normal/build dependency needs -/// to link to another dependency whose features differ based on whether or -/// not it is a normal or build dependency. If both units used -/// `CompileKind::Host`, then they would end up being identical, causing a -/// collision in the `UnitGraph`, and Cargo would end up randomly choosing one -/// value or the other. -/// -/// The solution is to keep normal and build dependencies separate when -/// building the unit graph, and then run this second pass which will try to -/// combine shared dependencies safely. By adding a hash of the dependencies -/// to the `Unit`, this allows the `CompileKind` to be changed back to `Host` -/// without fear of an unwanted collision. 
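// [Editor's illustration, not part of the deleted benchmark source.] The rewrite
// described above is a memoized depth-first traversal; this toy version over a
// u32-keyed graph sketches the shape of `traverse_and_share` below. The name
// `share` and the integer graph are invented for illustration.
fn share(
    node: u32,
    graph: &std::collections::HashMap<u32, Vec<u32>>,
    memo: &mut std::collections::HashMap<u32, u32>,
    out: &mut std::collections::HashMap<u32, Vec<u32>>,
) -> u32 {
    if let Some(&new) = memo.get(&node) {
        // Already rewritten once; reuse the shared result instead of recursing.
        return new;
    }
    // Rewrite dependencies first, mirroring how the real code hashes its deps.
    let mut deps = Vec::new();
    for d in &graph[&node] {
        deps.push(share(*d, graph, memo, out));
    }
    // The real code interns a new Unit here (folding in `dep_hash` and a possibly
    // changed `CompileKind`); the toy version simply reuses the node id.
    let new = node;
    memo.insert(node, new);
    out.entry(new).or_insert(deps);
    new
}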
-fn rebuild_unit_graph_shared( - interner: &UnitInterner, - unit_graph: UnitGraph, - roots: &[Unit], - scrape_units: &[Unit], - to_host: CompileKind, -) -> (Vec, Vec, UnitGraph) { - let mut result = UnitGraph::new(); - // Map of the old unit to the new unit, used to avoid recursing into units - // that have already been computed to improve performance. - let mut memo = HashMap::new(); - let new_roots = roots - .iter() - .map(|root| { - traverse_and_share(interner, &mut memo, &mut result, &unit_graph, root, to_host) - }) - .collect(); - let new_scrape_units = scrape_units - .iter() - .map(|unit| memo.get(unit).unwrap().clone()) - .collect(); - (new_roots, new_scrape_units, result) -} - -/// Recursive function for rebuilding the graph. -/// -/// This walks `unit_graph`, starting at the given `unit`. It inserts the new -/// units into `new_graph`, and returns a new updated version of the given -/// unit (`dep_hash` is filled in, and `kind` switched if necessary). -fn traverse_and_share( - interner: &UnitInterner, - memo: &mut HashMap, - new_graph: &mut UnitGraph, - unit_graph: &UnitGraph, - unit: &Unit, - to_host: CompileKind, -) -> Unit { - if let Some(new_unit) = memo.get(unit) { - // Already computed, no need to recompute. - return new_unit.clone(); - } - let mut dep_hash = StableHasher::new(); - let new_deps: Vec<_> = unit_graph[unit] - .iter() - .map(|dep| { - let new_dep_unit = - traverse_and_share(interner, memo, new_graph, unit_graph, &dep.unit, to_host); - new_dep_unit.hash(&mut dep_hash); - UnitDep { - unit: new_dep_unit, - ..dep.clone() - } - }) - .collect(); - let new_dep_hash = dep_hash.finish(); - let new_kind = if unit.kind == to_host { - CompileKind::Host - } else { - unit.kind - }; - let new_unit = interner.intern( - &unit.pkg, - &unit.target, - unit.profile, - new_kind, - unit.mode, - unit.features.clone(), - unit.is_std, - new_dep_hash, - ); - assert!(memo.insert(unit.clone(), new_unit.clone()).is_none()); - new_graph.entry(new_unit.clone()).or_insert(new_deps); - new_unit -} - -/// Build `glob::Pattern` with informative context. -fn build_glob(pat: &str) -> CargoResult { - glob::Pattern::new(pat).with_context(|| format!("cannot build glob pattern from `{}`", pat)) -} - -/// Emits "package not found" error. -/// -/// > This function should be used only in package selection processes such like -/// `Packages::to_package_id_specs` and `Packages::get_packages`. -fn emit_package_not_found( - ws: &Workspace<'_>, - opt_names: BTreeSet<&str>, - opt_out: bool, -) -> CargoResult<()> { - if !opt_names.is_empty() { - anyhow::bail!( - "{}package(s) `{}` not found in workspace `{}`", - if opt_out { "excluded " } else { "" }, - opt_names.into_iter().collect::>().join(", "), - ws.root().display(), - ) - } - Ok(()) -} - -/// Emits "glob pattern not found" error. -/// -/// > This function should be used only in package selection processes such like -/// `Packages::to_package_id_specs` and `Packages::get_packages`. 
-fn emit_pattern_not_found( - ws: &Workspace<'_>, - opt_patterns: Vec<(glob::Pattern, bool)>, - opt_out: bool, -) -> CargoResult<()> { - let not_matched = opt_patterns - .iter() - .filter(|(_, matched)| !*matched) - .map(|(pat, _)| pat.as_str()) - .collect::>(); - if !not_matched.is_empty() { - anyhow::bail!( - "{}package pattern(s) `{}` not found in workspace `{}`", - if opt_out { "excluded " } else { "" }, - not_matched.join(", "), - ws.root().display(), - ) - } - Ok(()) -} - -/// Checks whether a package matches any of a list of glob patterns generated -/// from `opt_patterns_and_names`. -/// -/// > This function should be used only in package selection processes such like -/// `Packages::to_package_id_specs` and `Packages::get_packages`. -fn match_patterns(pkg: &Package, patterns: &mut Vec<(glob::Pattern, bool)>) -> bool { - patterns.iter_mut().any(|(m, matched)| { - let is_matched = m.matches(pkg.name().as_str()); - *matched |= is_matched; - is_matched - }) -} - -/// Given a list opt-in or opt-out package selection strings, generates two -/// collections that represent glob patterns and package names respectively. -/// -/// > This function should be used only in package selection processes such like -/// `Packages::to_package_id_specs` and `Packages::get_packages`. -fn opt_patterns_and_names( - opt: &[String], -) -> CargoResult<(Vec<(glob::Pattern, bool)>, BTreeSet<&str>)> { - let mut opt_patterns = Vec::new(); - let mut opt_names = BTreeSet::new(); - for x in opt.iter() { - if is_glob_pattern(x) { - opt_patterns.push((build_glob(x)?, false)); - } else { - opt_names.insert(String::as_str(x)); - } - } - Ok((opt_patterns, opt_names)) -} - -/// Removes duplicate CompileMode::Doc units that would cause problems with -/// filename collisions. -/// -/// Rustdoc only separates units by crate name in the file directory -/// structure. If any two units with the same crate name exist, this would -/// cause a filename collision, causing different rustdoc invocations to stomp -/// on one another's files. -/// -/// Unfortunately this does not remove all duplicates, as some of them are -/// either user error, or difficult to remove. Cases that I can think of: -/// -/// - Same target name in different packages. See the `collision_doc` test. -/// - Different sources. See `collision_doc_sources` test. -/// -/// Ideally this would not be necessary. -fn remove_duplicate_doc( - build_config: &BuildConfig, - root_units: &[Unit], - unit_graph: &mut UnitGraph, -) { - // First, create a mapping of crate_name -> Unit so we can see where the - // duplicates are. - let mut all_docs: HashMap> = HashMap::new(); - for unit in unit_graph.keys() { - if unit.mode.is_doc() { - all_docs - .entry(unit.target.crate_name()) - .or_default() - .push(unit.clone()); - } - } - // Keep track of units to remove so that they can be efficiently removed - // from the unit_deps. - let mut removed_units: HashSet = HashSet::new(); - let mut remove = |units: Vec, reason: &str, cb: &dyn Fn(&Unit) -> bool| -> Vec { - let (to_remove, remaining_units): (Vec, Vec) = units - .into_iter() - .partition(|unit| cb(unit) && !root_units.contains(unit)); - for unit in to_remove { - log::debug!( - "removing duplicate doc due to {} for package {} target `{}`", - reason, - unit.pkg, - unit.target.name() - ); - unit_graph.remove(&unit); - removed_units.insert(unit); - } - remaining_units - }; - // Iterate over the duplicates and try to remove them from unit_graph. 
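// [Editor's illustration, not part of the deleted benchmark source.] A small
// sketch of how the selection helpers above split `-p`/`--exclude` arguments
// into glob patterns and literal names; the argument values are invented.
fn demo_package_selection() -> CargoResult<()> {
    let args = vec!["serde*".to_string(), "cargo".to_string()];
    let (patterns, names) = opt_patterns_and_names(&args)?;
    // "cargo" has no glob metacharacters, so it stays a literal package name.
    assert!(names.contains("cargo"));
    // "serde*" is compiled to a glob::Pattern; the bool tracks whether it ever
    // matched a workspace member (flipped later by `match_patterns`).
    assert!(patterns[0].0.matches("serde_json"));
    assert!(!patterns[0].1);
    Ok(())
}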
- for (_crate_name, mut units) in all_docs { - if units.len() == 1 { - continue; - } - // Prefer target over host if --target was not specified. - if build_config - .requested_kinds - .iter() - .all(CompileKind::is_host) - { - // Note these duplicates may not be real duplicates, since they - // might get merged in rebuild_unit_graph_shared. Either way, it - // shouldn't hurt to remove them early (although the report in the - // log might be confusing). - units = remove(units, "host/target merger", &|unit| unit.kind.is_host()); - if units.len() == 1 { - continue; - } - } - // Prefer newer versions over older. - let mut source_map: HashMap<(InternedString, SourceId, CompileKind), Vec> = - HashMap::new(); - for unit in units { - let pkg_id = unit.pkg.package_id(); - // Note, this does not detect duplicates from different sources. - source_map - .entry((pkg_id.name(), pkg_id.source_id(), unit.kind)) - .or_default() - .push(unit); - } - let mut remaining_units = Vec::new(); - for (_key, mut units) in source_map { - if units.len() > 1 { - units.sort_by(|a, b| a.pkg.version().partial_cmp(b.pkg.version()).unwrap()); - // Remove any entries with version < newest. - let newest_version = units.last().unwrap().pkg.version().clone(); - let keep_units = remove(units, "older version", &|unit| { - unit.pkg.version() < &newest_version - }); - remaining_units.extend(keep_units); - } else { - remaining_units.extend(units); - } - } - if remaining_units.len() == 1 { - continue; - } - // Are there other heuristics to remove duplicates that would make - // sense? Maybe prefer path sources over all others? - } - // Also remove units from the unit_deps so there aren't any dangling edges. - for unit_deps in unit_graph.values_mut() { - unit_deps.retain(|unit_dep| !removed_units.contains(&unit_dep.unit)); - } - // Remove any orphan units that were detached from the graph. - let mut visited = HashSet::new(); - fn visit(unit: &Unit, graph: &UnitGraph, visited: &mut HashSet) { - if !visited.insert(unit.clone()) { - return; - } - for dep in &graph[unit] { - visit(&dep.unit, graph, visited); - } - } - for unit in root_units { - visit(unit, unit_graph, &mut visited); - } - unit_graph.retain(|unit, _| visited.contains(unit)); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_config.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_config.rs deleted file mode 100644 index e84e84edf..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_config.rs +++ /dev/null @@ -1,306 +0,0 @@ -//! Implementation of `cargo config` subcommand. - -use crate::util::config::{Config, ConfigKey, ConfigValue as CV, Definition}; -use crate::util::errors::CargoResult; -use crate::{drop_eprintln, drop_println}; -use anyhow::{bail, format_err, Error}; -use serde_json::json; -use std::borrow::Cow; -use std::fmt; -use std::str::FromStr; - -pub enum ConfigFormat { - Toml, - Json, - JsonValue, -} - -impl ConfigFormat { - /// For clap. 
- pub const POSSIBLE_VALUES: &'static [&'static str] = &["toml", "json", "json-value"]; -} - -impl FromStr for ConfigFormat { - type Err = Error; - fn from_str(s: &str) -> CargoResult { - match s { - "toml" => Ok(ConfigFormat::Toml), - "json" => Ok(ConfigFormat::Json), - "json-value" => Ok(ConfigFormat::JsonValue), - f => bail!("unknown config format `{}`", f), - } - } -} - -impl fmt::Display for ConfigFormat { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - ConfigFormat::Toml => write!(f, "toml"), - ConfigFormat::Json => write!(f, "json"), - ConfigFormat::JsonValue => write!(f, "json-value"), - } - } -} - -/// Options for `cargo config get`. -pub struct GetOptions<'a> { - pub key: Option<&'a str>, - pub format: ConfigFormat, - pub show_origin: bool, - pub merged: bool, -} - -pub fn get(config: &Config, opts: &GetOptions<'_>) -> CargoResult<()> { - if opts.show_origin && !matches!(opts.format, ConfigFormat::Toml) { - bail!( - "the `{}` format does not support --show-origin, try the `toml` format instead", - opts.format - ); - } - let key = match opts.key { - Some(key) => ConfigKey::from_str(key), - None => ConfigKey::new(), - }; - if opts.merged { - let cv = config - .get_cv_with_env(&key)? - .ok_or_else(|| format_err!("config value `{}` is not set", key))?; - match opts.format { - ConfigFormat::Toml => print_toml(config, opts, &key, &cv), - ConfigFormat::Json => print_json(config, &key, &cv, true), - ConfigFormat::JsonValue => print_json(config, &key, &cv, false), - } - if let Some(env) = maybe_env(config, &key, &cv) { - match opts.format { - ConfigFormat::Toml => print_toml_env(config, &env), - ConfigFormat::Json | ConfigFormat::JsonValue => print_json_env(config, &env), - } - } - } else { - match &opts.format { - ConfigFormat::Toml => print_toml_unmerged(config, opts, &key)?, - format => bail!( - "the `{}` format does not support --merged=no, try the `toml` format instead", - format - ), - } - } - Ok(()) -} - -/// Checks for environment variables that might be used. -fn maybe_env<'config>( - config: &'config Config, - key: &ConfigKey, - cv: &CV, -) -> Option> { - // Only fetching a table is unable to load env values. Leaf entries should - // work properly. 
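// [Editor's illustration, not part of the deleted benchmark source.] Round-trips
// the three accepted `--format` values through the `FromStr`/`Display` impls
// defined above; `demo_config_format` is an invented name.
fn demo_config_format() -> CargoResult<()> {
    for s in ConfigFormat::POSSIBLE_VALUES {
        let parsed: ConfigFormat = s.parse()?;
        assert_eq!(parsed.to_string(), *s);
    }
    // Anything else is rejected with an "unknown config format" error.
    assert!("yaml".parse::<ConfigFormat>().is_err());
    Ok(())
}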
- match cv { - CV::Table(_map, _def) => {} - _ => return None, - } - let mut env: Vec<_> = config - .env() - .iter() - .filter(|(env_key, _val)| env_key.starts_with(&format!("{}_", key.as_env_key()))) - .collect(); - env.sort_by_key(|x| x.0); - if env.is_empty() { - None - } else { - Some(env) - } -} - -fn print_toml(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey, cv: &CV) { - let origin = |def: &Definition| -> String { - if !opts.show_origin { - return "".to_string(); - } - format!(" # {}", def) - }; - match cv { - CV::Boolean(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)), - CV::Integer(val, def) => drop_println!(config, "{} = {}{}", key, val, origin(def)), - CV::String(val, def) => drop_println!( - config, - "{} = {}{}", - key, - toml::to_string(&val).unwrap(), - origin(def) - ), - CV::List(vals, _def) => { - if opts.show_origin { - drop_println!(config, "{} = [", key); - for (val, def) in vals { - drop_println!(config, " {}, # {}", toml::to_string(&val).unwrap(), def); - } - drop_println!(config, "]"); - } else { - let vals: Vec<&String> = vals.iter().map(|x| &x.0).collect(); - drop_println!(config, "{} = {}", key, toml::to_string(&vals).unwrap()); - } - } - CV::Table(table, _def) => { - let mut key_vals: Vec<_> = table.iter().collect(); - key_vals.sort_by(|a, b| a.0.cmp(b.0)); - for (table_key, val) in key_vals { - let mut subkey = key.clone(); - // push or push_sensitive shouldn't matter here, since this is - // not dealing with environment variables. - subkey.push(table_key); - print_toml(config, opts, &subkey, val); - } - } - } -} - -fn print_toml_env(config: &Config, env: &[(&String, &String)]) { - drop_println!( - config, - "# The following environment variables may affect the loaded values." - ); - for (env_key, env_value) in env { - let val = shell_escape::escape(Cow::Borrowed(env_value)); - drop_println!(config, "# {}={}", env_key, val); - } -} - -fn print_json_env(config: &Config, env: &[(&String, &String)]) { - drop_eprintln!( - config, - "note: The following environment variables may affect the loaded values." - ); - for (env_key, env_value) in env { - let val = shell_escape::escape(Cow::Borrowed(env_value)); - drop_eprintln!(config, "{}={}", env_key, val); - } -} - -fn print_json(config: &Config, key: &ConfigKey, cv: &CV, include_key: bool) { - let json_value = if key.is_root() || !include_key { - cv_to_json(cv) - } else { - let mut parts: Vec<_> = key.parts().collect(); - let last_part = parts.pop().unwrap(); - let mut root_table = json!({}); - // Create a JSON object with nested keys up to the value being displayed. - let mut table = &mut root_table; - for part in parts { - table[part] = json!({}); - table = table.get_mut(part).unwrap(); - } - table[last_part] = cv_to_json(cv); - root_table - }; - drop_println!(config, "{}", serde_json::to_string(&json_value).unwrap()); - - // Helper for recursively converting a CV to JSON. 
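// [Editor's illustration, not part of the deleted benchmark source.] The nested
// object construction used by `print_json` above, shown directly with serde_json
// for a hypothetical key `build.jobs`.
fn demo_nested_json() {
    use serde_json::json;
    let parts = ["build", "jobs"];
    let (last, init) = parts.split_last().unwrap();
    let mut root = json!({});
    let mut table = &mut root;
    for part in init {
        // Walking down the key creates `{"build": {}}` first...
        table[*part] = json!({});
        table = table.get_mut(*part).unwrap();
    }
    // ...and the leaf value is written last.
    table[*last] = json!(4);
    assert_eq!(root, json!({"build": {"jobs": 4}}));
}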
- fn cv_to_json(cv: &CV) -> serde_json::Value { - match cv { - CV::Boolean(val, _def) => json!(val), - CV::Integer(val, _def) => json!(val), - CV::String(val, _def) => json!(val), - CV::List(vals, _def) => { - let jvals: Vec<_> = vals.iter().map(|(val, _def)| json!(val)).collect(); - json!(jvals) - } - CV::Table(map, _def) => { - let mut table = json!({}); - for (key, val) in map { - table[key] = cv_to_json(val); - } - table - } - } - } -} - -fn print_toml_unmerged(config: &Config, opts: &GetOptions<'_>, key: &ConfigKey) -> CargoResult<()> { - let print_table = |cv: &CV| { - drop_println!(config, "# {}", cv.definition()); - print_toml(config, opts, &ConfigKey::new(), cv); - drop_println!(config, ""); - }; - // This removes entries from the given CV so that all that remains is the - // given key. Returns false if no entries were found. - fn trim_cv(mut cv: &mut CV, key: &ConfigKey) -> CargoResult { - for (i, part) in key.parts().enumerate() { - match cv { - CV::Table(map, _def) => { - map.retain(|key, _value| key == part); - match map.get_mut(part) { - Some(val) => cv = val, - None => return Ok(false), - } - } - _ => { - let mut key_so_far = ConfigKey::new(); - for part in key.parts().take(i) { - key_so_far.push(part); - } - bail!( - "expected table for configuration key `{}`, \ - but found {} in {}", - key_so_far, - cv.desc(), - cv.definition() - ) - } - } - } - Ok(match cv { - CV::Table(map, _def) => !map.is_empty(), - _ => true, - }) - } - - let mut cli_args = config.cli_args_as_table()?; - if trim_cv(&mut cli_args, key)? { - print_table(&cli_args); - } - - // This slurps up some extra env vars that aren't technically part of the - // "config" (or are special-cased). I'm personally fine with just keeping - // them here, though it might be confusing. The vars I'm aware of: - // - // * CARGO - // * CARGO_HOME - // * CARGO_NAME - // * CARGO_EMAIL - // * CARGO_INCREMENTAL - // * CARGO_TARGET_DIR - // * CARGO_CACHE_RUSTC_INFO - // - // All of these except CARGO, CARGO_HOME, and CARGO_CACHE_RUSTC_INFO are - // actually part of the config, but they are special-cased in the code. - // - // TODO: It might be a good idea to teach the Config loader to support - // environment variable aliases so that these special cases are less - // special, and will just naturally get loaded as part of the config. - let mut env: Vec<_> = config - .env() - .iter() - .filter(|(env_key, _val)| env_key.starts_with(key.as_env_key())) - .collect(); - if !env.is_empty() { - env.sort_by_key(|x| x.0); - drop_println!(config, "# Environment variables"); - for (key, value) in env { - // Displaying this in "shell" syntax instead of TOML, since that - // somehow makes more sense to me. - let val = shell_escape::escape(Cow::Borrowed(value)); - drop_println!(config, "# {}={}", key, val); - } - drop_println!(config, ""); - } - - let unmerged = config.load_values_unmerged()?; - for mut cv in unmerged { - if trim_cv(&mut cv, key)? { - print_table(&cv); - } - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_doc.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_doc.rs deleted file mode 100644 index b4f2b7d17..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_doc.rs +++ /dev/null @@ -1,74 +0,0 @@ -use crate::core::{Shell, Workspace}; -use crate::ops; -use crate::util::config::PathAndArgs; -use crate::util::CargoResult; -use std::path::Path; -use std::path::PathBuf; -use std::process::Command; - -/// Strongly typed options for the `cargo doc` command. 
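// [Editor's illustration, not part of the deleted benchmark source.] The
// `index.html` path that `doc` below opens in a browser, shown with std::path
// only; `target/debug` stands in for `compilation.root_output[&kind]`.
fn demo_doc_index_path() {
    use std::path::PathBuf;
    let root_output = PathBuf::from("target/debug");
    let index = root_output
        .with_file_name("doc") // target/debug -> target/doc
        .join("serde")         // crate name
        .join("index.html");
    assert_eq!(index, PathBuf::from("target/doc/serde/index.html"));
}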
-#[derive(Debug)] -pub struct DocOptions { - /// Whether to attempt to open the browser after compiling the docs - pub open_result: bool, - /// Options to pass through to the compiler - pub compile_opts: ops::CompileOptions, -} - -/// Main method for `cargo doc`. -pub fn doc(ws: &Workspace<'_>, options: &DocOptions) -> CargoResult<()> { - let compilation = ops::compile(ws, &options.compile_opts)?; - - if options.open_result { - let name = &compilation - .root_crate_names - .get(0) - .ok_or_else(|| anyhow::anyhow!("no crates with documentation"))?; - let kind = options.compile_opts.build_config.single_requested_kind()?; - let path = compilation.root_output[&kind] - .with_file_name("doc") - .join(&name) - .join("index.html"); - if path.exists() { - let config_browser = { - let cfg: Option = ws.config().get("doc.browser")?; - cfg.map(|path_args| (path_args.path.resolve_program(ws.config()), path_args.args)) - }; - - let mut shell = ws.config().shell(); - shell.status("Opening", path.display())?; - open_docs(&path, &mut shell, config_browser)?; - } - } - - Ok(()) -} - -fn open_docs( - path: &Path, - shell: &mut Shell, - config_browser: Option<(PathBuf, Vec)>, -) -> CargoResult<()> { - let browser = - config_browser.or_else(|| Some((PathBuf::from(std::env::var_os("BROWSER")?), Vec::new()))); - - match browser { - Some((browser, initial_args)) => { - if let Err(e) = Command::new(&browser).args(initial_args).arg(path).status() { - shell.warn(format!( - "Couldn't open docs with {}: {}", - browser.to_string_lossy(), - e - ))?; - } - } - None => { - if let Err(e) = opener::open(&path) { - let e = e.into(); - crate::display_warning_with_error("couldn't open docs", &e, shell); - } - } - }; - - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_fetch.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_fetch.rs deleted file mode 100644 index 1e0d855d0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_fetch.rs +++ /dev/null @@ -1,63 +0,0 @@ -use crate::core::compiler::{BuildConfig, CompileMode, RustcTargetData}; -use crate::core::{PackageSet, Resolve, Workspace}; -use crate::ops; -use crate::util::CargoResult; -use crate::util::Config; -use std::collections::HashSet; - -pub struct FetchOptions<'a> { - pub config: &'a Config, - /// The target arch triple to fetch dependencies for - pub targets: Vec, -} - -/// Executes `cargo fetch`. -pub fn fetch<'a>( - ws: &Workspace<'a>, - options: &FetchOptions<'a>, -) -> CargoResult<(Resolve, PackageSet<'a>)> { - ws.emit_warnings()?; - let (packages, resolve) = ops::resolve_ws(ws)?; - - let jobs = Some(1); - let config = ws.config(); - let build_config = BuildConfig::new(config, jobs, &options.targets, CompileMode::Build)?; - let data = RustcTargetData::new(ws, &build_config.requested_kinds)?; - let mut fetched_packages = HashSet::new(); - let mut deps_to_fetch = ws.members().map(|p| p.package_id()).collect::>(); - let mut to_download = Vec::new(); - - while let Some(id) = deps_to_fetch.pop() { - if !fetched_packages.insert(id) { - continue; - } - - to_download.push(id); - let deps = resolve - .deps(id) - .filter(|&(_id, deps)| { - deps.iter().any(|d| { - // If no target was specified then all dependencies are - // fetched. - if options.targets.is_empty() { - return true; - } - - // Otherwise we only download this dependency if any of the - // requested platforms would match this dependency. 
Note - // that this is a bit lossy because not all dependencies are - // always compiled for all platforms, but it should be - // "close enough" for now. - build_config - .requested_kinds - .iter() - .any(|kind| data.dep_platform_activated(d, *kind)) - }) - }) - .map(|(id, _deps)| id); - deps_to_fetch.extend(deps); - } - packages.get_many(to_download)?; - - Ok((resolve, packages)) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_generate_lockfile.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_generate_lockfile.rs deleted file mode 100644 index 04d4010f4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_generate_lockfile.rs +++ /dev/null @@ -1,252 +0,0 @@ -use crate::core::registry::PackageRegistry; -use crate::core::resolver::features::{CliFeatures, HasDevUnits}; -use crate::core::{PackageId, PackageIdSpec}; -use crate::core::{Resolve, SourceId, Workspace}; -use crate::ops; -use crate::util::config::Config; -use crate::util::CargoResult; -use anyhow::Context; -use log::debug; -use std::collections::{BTreeMap, HashSet}; -use termcolor::Color::{self, Cyan, Green, Red}; - -pub struct UpdateOptions<'a> { - pub config: &'a Config, - pub to_update: Vec, - pub precise: Option<&'a str>, - pub aggressive: bool, - pub dry_run: bool, - pub workspace: bool, -} - -pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> { - let mut registry = PackageRegistry::new(ws.config())?; - let mut resolve = ops::resolve_with_previous( - &mut registry, - ws, - &CliFeatures::new_all(true), - HasDevUnits::Yes, - None, - None, - &[], - true, - )?; - ops::write_pkg_lockfile(ws, &mut resolve)?; - Ok(()) -} - -pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoResult<()> { - if opts.aggressive && opts.precise.is_some() { - anyhow::bail!("cannot specify both aggressive and precise simultaneously") - } - - if ws.members().count() == 0 { - anyhow::bail!("you can't generate a lockfile for an empty workspace.") - } - - // Updates often require a lot of modifications to the registry, so ensure - // that we're synchronized against other Cargos. - let _lock = ws.config().acquire_package_cache_lock()?; - - let previous_resolve = match ops::load_pkg_lockfile(ws)? { - Some(resolve) => resolve, - None => { - match opts.precise { - None => return generate_lockfile(ws), - - // Precise option specified, so calculate a previous_resolve required - // by precise package update later. - Some(_) => { - let mut registry = PackageRegistry::new(opts.config)?; - ops::resolve_with_previous( - &mut registry, - ws, - &CliFeatures::new_all(true), - HasDevUnits::Yes, - None, - None, - &[], - true, - )? - } - } - } - }; - let mut registry = PackageRegistry::new(opts.config)?; - let mut to_avoid = HashSet::new(); - - if opts.to_update.is_empty() { - if !opts.workspace { - to_avoid.extend(previous_resolve.iter()); - to_avoid.extend(previous_resolve.unused_patches()); - } - } else { - let mut sources = Vec::new(); - for name in opts.to_update.iter() { - let dep = previous_resolve.query(name)?; - if opts.aggressive { - fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new()); - } else { - to_avoid.insert(dep); - sources.push(match opts.precise { - Some(precise) => { - // TODO: see comment in `resolve.rs` as well, but this - // seems like a pretty hokey reason to single out - // the registry as well. 
- let precise = if dep.source_id().is_registry() { - semver::Version::parse(precise).with_context(|| { - format!("invalid version format for precise version `{}`", precise) - })?; - format!("{}={}->{}", dep.name(), dep.version(), precise) - } else { - precise.to_string() - }; - dep.source_id().with_precise(Some(precise)) - } - None => dep.source_id().with_precise(None), - }); - } - if let Ok(unused_id) = - PackageIdSpec::query_str(name, previous_resolve.unused_patches().iter().cloned()) - { - to_avoid.insert(unused_id); - } - } - - registry.add_sources(sources)?; - } - - let mut resolve = ops::resolve_with_previous( - &mut registry, - ws, - &CliFeatures::new_all(true), - HasDevUnits::Yes, - Some(&previous_resolve), - Some(&to_avoid), - &[], - true, - )?; - - // Summarize what is changing for the user. - let print_change = |status: &str, msg: String, color: Color| { - opts.config.shell().status_with_color(status, msg, color) - }; - for (removed, added) in compare_dependency_graphs(&previous_resolve, &resolve) { - if removed.len() == 1 && added.len() == 1 { - let msg = if removed[0].source_id().is_git() { - format!( - "{} -> #{}", - removed[0], - &added[0].source_id().precise().unwrap()[..8] - ) - } else { - format!("{} -> v{}", removed[0], added[0].version()) - }; - print_change("Updating", msg, Green)?; - } else { - for package in removed.iter() { - print_change("Removing", format!("{}", package), Red)?; - } - for package in added.iter() { - print_change("Adding", format!("{}", package), Cyan)?; - } - } - } - if opts.dry_run { - opts.config - .shell() - .warn("not updating lockfile due to dry run")?; - } else { - ops::write_pkg_lockfile(ws, &mut resolve)?; - } - return Ok(()); - - fn fill_with_deps<'a>( - resolve: &'a Resolve, - dep: PackageId, - set: &mut HashSet, - visited: &mut HashSet, - ) { - if !visited.insert(dep) { - return; - } - set.insert(dep); - for (dep, _) in resolve.deps_not_replaced(dep) { - fill_with_deps(resolve, dep, set, visited); - } - } - - fn compare_dependency_graphs( - previous_resolve: &Resolve, - resolve: &Resolve, - ) -> Vec<(Vec, Vec)> { - fn key(dep: PackageId) -> (&'static str, SourceId) { - (dep.name().as_str(), dep.source_id()) - } - - // Removes all package IDs in `b` from `a`. Note that this is somewhat - // more complicated because the equality for source IDs does not take - // precise versions into account (e.g., git shas), but we want to take - // that into account here. - fn vec_subtract(a: &[PackageId], b: &[PackageId]) -> Vec { - a.iter() - .filter(|a| { - // If this package ID is not found in `b`, then it's definitely - // in the subtracted set. - let i = match b.binary_search(a) { - Ok(i) => i, - Err(..) => return true, - }; - - // If we've found `a` in `b`, then we iterate over all instances - // (we know `b` is sorted) and see if they all have different - // precise versions. If so, then `a` isn't actually in `b` so - // we'll let it through. - // - // Note that we only check this for non-registry sources, - // however, as registries contain enough version information in - // the package ID to disambiguate. - if a.source_id().is_registry() { - return false; - } - b[i..] - .iter() - .take_while(|b| a == b) - .all(|b| a.source_id().precise() != b.source_id().precise()) - }) - .cloned() - .collect() - } - - // Map `(package name, package source)` to `(removed versions, added versions)`. 
- let mut changes = BTreeMap::new(); - let empty = (Vec::new(), Vec::new()); - for dep in previous_resolve.iter() { - changes - .entry(key(dep)) - .or_insert_with(|| empty.clone()) - .0 - .push(dep); - } - for dep in resolve.iter() { - changes - .entry(key(dep)) - .or_insert_with(|| empty.clone()) - .1 - .push(dep); - } - - for v in changes.values_mut() { - let (ref mut old, ref mut new) = *v; - old.sort(); - new.sort(); - let removed = vec_subtract(old, new); - let added = vec_subtract(new, old); - *old = removed; - *new = added; - } - debug!("{:#?}", changes); - - changes.into_iter().map(|(_, v)| v).collect() - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_install.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_install.rs deleted file mode 100644 index 4380d3f48..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_install.rs +++ /dev/null @@ -1,838 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; -use std::path::{Path, PathBuf}; -use std::sync::Arc; -use std::{env, fs}; - -use crate::core::compiler::{CompileKind, DefaultExecutor, Executor, Freshness, UnitOutput}; -use crate::core::{Dependency, Edition, Package, PackageId, Source, SourceId, Workspace}; -use crate::ops::common_for_install_and_uninstall::*; -use crate::sources::{GitSource, PathSource, SourceConfigMap}; -use crate::util::errors::CargoResult; -use crate::util::{Config, Filesystem, Rustc, ToSemver, VersionReqExt}; -use crate::{drop_println, ops}; - -use anyhow::{bail, format_err, Context as _}; -use cargo_util::paths; -use semver::VersionReq; -use tempfile::Builder as TempFileBuilder; - -struct Transaction { - bins: Vec, -} - -impl Transaction { - fn success(mut self) { - self.bins.clear(); - } -} - -impl Drop for Transaction { - fn drop(&mut self) { - for bin in self.bins.iter() { - let _ = paths::remove_file(bin); - } - } -} - -struct InstallablePackage<'cfg, 'a> { - config: &'cfg Config, - opts: &'a ops::CompileOptions, - root: Filesystem, - source_id: SourceId, - vers: Option<&'a str>, - force: bool, - no_track: bool, - - pkg: Package, - ws: Workspace<'cfg>, - rustc: Rustc, - target: String, -} - -impl<'cfg, 'a> InstallablePackage<'cfg, 'a> { - // Returns pkg to install. None if pkg is already installed - pub fn new( - config: &'cfg Config, - root: Filesystem, - map: SourceConfigMap<'_>, - krate: Option<&str>, - source_id: SourceId, - from_cwd: bool, - vers: Option<&'a str>, - opts: &'a ops::CompileOptions, - force: bool, - no_track: bool, - needs_update_if_source_is_index: bool, - ) -> CargoResult>> { - if let Some(name) = krate { - if name == "." { - bail!( - "To install the binaries for the package in current working \ - directory use `cargo install --path .`. \ - Use `cargo build` if you want to simply build the package." - ) - } - } - - let dst = root.join("bin").into_path_unlocked(); - let pkg = { - let dep = { - if let Some(krate) = krate { - let vers = if let Some(vers_flag) = vers { - Some(parse_semver_flag(vers_flag)?.to_string()) - } else if source_id.is_registry() { - // Avoid pre-release versions from crate.io - // unless explicitly asked for - Some(String::from("*")) - } else { - None - }; - Some(Dependency::parse(krate, vers.as_deref(), source_id)?) - } else { - None - } - }; - - if source_id.is_git() { - let mut source = GitSource::new(source_id, config)?; - select_pkg( - &mut source, - dep, - |git: &mut GitSource<'_>| git.read_packages(), - config, - )? 
- } else if source_id.is_path() { - let mut src = path_source(source_id, config)?; - if !src.path().is_dir() { - bail!( - "`{}` is not a directory. \ - --path must point to a directory containing a Cargo.toml file.", - src.path().display() - ) - } - if !src.path().join("Cargo.toml").exists() { - if from_cwd { - bail!( - "`{}` is not a crate root; specify a crate to \ - install from crates.io, or use --path or --git to \ - specify an alternate source", - src.path().display() - ); - } else if src.path().join("cargo.toml").exists() { - bail!( - "`{}` does not contain a Cargo.toml file, but found cargo.toml please try to rename it to Cargo.toml. \ - --path must point to a directory containing a Cargo.toml file.", - src.path().display() - ) - } else { - bail!( - "`{}` does not contain a Cargo.toml file. \ - --path must point to a directory containing a Cargo.toml file.", - src.path().display() - ) - } - } - select_pkg( - &mut src, - dep, - |path: &mut PathSource<'_>| path.read_packages(), - config, - )? - } else if let Some(dep) = dep { - let mut source = map.load(source_id, &HashSet::new())?; - if let Ok(Some(pkg)) = installed_exact_package( - dep.clone(), - &mut source, - config, - opts, - &root, - &dst, - force, - ) { - let msg = format!( - "package `{}` is already installed, use --force to override", - pkg - ); - config.shell().status("Ignored", &msg)?; - return Ok(None); - } - select_dep_pkg(&mut source, dep, config, needs_update_if_source_is_index)? - } else { - bail!( - "must specify a crate to install from \ - crates.io, or use --path or --git to \ - specify alternate source" - ) - } - }; - - let (ws, rustc, target) = make_ws_rustc_target(config, opts, &source_id, pkg.clone())?; - // If we're installing in --locked mode and there's no `Cargo.lock` published - // ie. the bin was published before https://github.com/rust-lang/cargo/pull/7026 - if config.locked() && !ws.root().join("Cargo.lock").exists() { - config.shell().warn(format!( - "no Cargo.lock file published in {}", - pkg.to_string() - ))?; - } - let pkg = if source_id.is_git() { - // Don't use ws.current() in order to keep the package source as a git source so that - // install tracking uses the correct source. - pkg - } else { - ws.current()?.clone() - }; - - if from_cwd { - if pkg.manifest().edition() == Edition::Edition2015 { - config.shell().warn( - "Using `cargo install` to install the binaries for the \ - package in current working directory is deprecated, \ - use `cargo install --path .` instead. \ - Use `cargo build` if you want to simply build the package.", - )? - } else { - bail!( - "Using `cargo install` to install the binaries for the \ - package in current working directory is no longer supported, \ - use `cargo install --path .` instead. \ - Use `cargo build` if you want to simply build the package." - ) - } - }; - - // For bare `cargo install` (no `--bin` or `--example`), check if there is - // *something* to install. Explicit `--bin` or `--example` flags will be - // checked at the start of `compile_ws`. 
- if !opts.filter.is_specific() && !pkg.targets().iter().any(|t| t.is_bin()) { - bail!( - "there is nothing to install in `{}`, because it has no binaries\n\ - `cargo install` is only for installing programs, and can't be used with libraries.\n\ - To use a library crate, add it as a dependency in a Cargo project instead.", - pkg - ); - } - - let ip = InstallablePackage { - config, - opts, - root, - source_id, - vers, - force, - no_track, - - pkg, - ws, - rustc, - target, - }; - - // WARNING: no_track does not perform locking, so there is no protection - // of concurrent installs. - if no_track { - // Check for conflicts. - ip.no_track_duplicates(&dst)?; - } else if is_installed( - &ip.pkg, config, opts, &ip.rustc, &ip.target, &ip.root, &dst, force, - )? { - let msg = format!( - "package `{}` is already installed, use --force to override", - ip.pkg - ); - config.shell().status("Ignored", &msg)?; - return Ok(None); - } - - Ok(Some(ip)) - } - - fn no_track_duplicates(&self, dst: &Path) -> CargoResult>> { - // Helper for --no-track flag to make sure it doesn't overwrite anything. - let duplicates: BTreeMap> = - exe_names(&self.pkg, &self.opts.filter) - .into_iter() - .filter(|name| dst.join(name).exists()) - .map(|name| (name, None)) - .collect(); - if !self.force && !duplicates.is_empty() { - let mut msg: Vec = duplicates - .iter() - .map(|(name, _)| { - format!( - "binary `{}` already exists in destination `{}`", - name, - dst.join(name).to_string_lossy() - ) - }) - .collect(); - msg.push("Add --force to overwrite".to_string()); - bail!("{}", msg.join("\n")); - } - Ok(duplicates) - } - - fn install_one(mut self) -> CargoResult<()> { - self.config.shell().status("Installing", &self.pkg)?; - - let dst = self.root.join("bin").into_path_unlocked(); - - let mut td_opt = None; - let mut needs_cleanup = false; - if !self.source_id.is_path() { - let target_dir = if let Some(dir) = self.config.target_dir()? { - dir - } else if let Ok(td) = TempFileBuilder::new().prefix("cargo-install").tempdir() { - let p = td.path().to_owned(); - td_opt = Some(td); - Filesystem::new(p) - } else { - needs_cleanup = true; - Filesystem::new(self.config.cwd().join("target-install")) - }; - self.ws.set_target_dir(target_dir); - } - - self.check_yanked_install()?; - - let exec: Arc = Arc::new(DefaultExecutor); - let compile = ops::compile_ws(&self.ws, self.opts, &exec).with_context(|| { - if let Some(td) = td_opt.take() { - // preserve the temporary directory, so the user can inspect it - td.into_path(); - } - - format!( - "failed to compile `{}`, intermediate artifacts can be \ - found at `{}`", - self.pkg, - self.ws.target_dir().display() - ) - })?; - let mut binaries: Vec<(&str, &Path)> = compile - .binaries - .iter() - .map(|UnitOutput { path, .. }| { - let name = path.file_name().unwrap(); - if let Some(s) = name.to_str() { - Ok((s, path.as_ref())) - } else { - bail!("Binary `{:?}` name can't be serialized into string", name) - } - }) - .collect::>()?; - if binaries.is_empty() { - bail!("no binaries are available for install using the selected features"); - } - // This is primarily to make testing easier. - binaries.sort_unstable(); - - let (tracker, duplicates) = if self.no_track { - (None, self.no_track_duplicates(&dst)?) 
- } else { - let tracker = InstallTracker::load(self.config, &self.root)?; - let (_freshness, duplicates) = tracker.check_upgrade( - &dst, - &self.pkg, - self.force, - self.opts, - &self.target, - &self.rustc.verbose_version, - )?; - (Some(tracker), duplicates) - }; - - paths::create_dir_all(&dst)?; - - // Copy all binaries to a temporary directory under `dst` first, catching - // some failure modes (e.g., out of space) before touching the existing - // binaries. This directory will get cleaned up via RAII. - let staging_dir = TempFileBuilder::new() - .prefix("cargo-install") - .tempdir_in(&dst)?; - for &(bin, src) in binaries.iter() { - let dst = staging_dir.path().join(bin); - // Try to move if `target_dir` is transient. - if !self.source_id.is_path() && fs::rename(src, &dst).is_ok() { - continue; - } - paths::copy(src, &dst)?; - } - - let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries - .iter() - .map(|&(bin, _)| bin) - .partition(|&bin| duplicates.contains_key(bin)); - - let mut installed = Transaction { bins: Vec::new() }; - let mut successful_bins = BTreeSet::new(); - - // Move the temporary copies into `dst` starting with new binaries. - for bin in to_install.iter() { - let src = staging_dir.path().join(bin); - let dst = dst.join(bin); - self.config.shell().status("Installing", dst.display())?; - fs::rename(&src, &dst).with_context(|| { - format!("failed to move `{}` to `{}`", src.display(), dst.display()) - })?; - installed.bins.push(dst); - successful_bins.insert(bin.to_string()); - } - - // Repeat for binaries which replace existing ones but don't pop the error - // up until after updating metadata. - let replace_result = { - let mut try_install = || -> CargoResult<()> { - for &bin in to_replace.iter() { - let src = staging_dir.path().join(bin); - let dst = dst.join(bin); - self.config.shell().status("Replacing", dst.display())?; - fs::rename(&src, &dst).with_context(|| { - format!("failed to move `{}` to `{}`", src.display(), dst.display()) - })?; - successful_bins.insert(bin.to_string()); - } - Ok(()) - }; - try_install() - }; - - if let Some(mut tracker) = tracker { - tracker.mark_installed( - &self.pkg, - &successful_bins, - self.vers.map(|s| s.to_string()), - self.opts, - &self.target, - &self.rustc.verbose_version, - ); - - if let Err(e) = - remove_orphaned_bins(&self.ws, &mut tracker, &duplicates, &self.pkg, &dst) - { - // Don't hard error on remove. - self.config - .shell() - .warn(format!("failed to remove orphan: {:?}", e))?; - } - - match tracker.save() { - Err(err) => replace_result.with_context(|| err)?, - Ok(_) => replace_result?, - } - } - - // Reaching here means all actions have succeeded. Clean up. - installed.success(); - if needs_cleanup { - // Don't bother grabbing a lock as we're going to blow it all away - // anyway. - let target_dir = self.ws.target_dir().into_path_unlocked(); - paths::remove_dir_all(&target_dir)?; - } - - // Helper for creating status messages. 
- fn executables>(mut names: impl Iterator + Clone) -> String { - if names.clone().count() == 1 { - format!("(executable `{}`)", names.next().unwrap().as_ref()) - } else { - format!( - "(executables {})", - names - .map(|b| format!("`{}`", b.as_ref())) - .collect::>() - .join(", ") - ) - } - } - - if duplicates.is_empty() { - self.config.shell().status( - "Installed", - format!( - "package `{}` {}", - self.pkg, - executables(successful_bins.iter()) - ), - )?; - Ok(()) - } else { - if !to_install.is_empty() { - self.config.shell().status( - "Installed", - format!("package `{}` {}", self.pkg, executables(to_install.iter())), - )?; - } - // Invert the duplicate map. - let mut pkg_map = BTreeMap::new(); - for (bin_name, opt_pkg_id) in &duplicates { - let key = - opt_pkg_id.map_or_else(|| "unknown".to_string(), |pkg_id| pkg_id.to_string()); - pkg_map.entry(key).or_insert_with(Vec::new).push(bin_name); - } - for (pkg_descr, bin_names) in &pkg_map { - self.config.shell().status( - "Replaced", - format!( - "package `{}` with `{}` {}", - pkg_descr, - self.pkg, - executables(bin_names.iter()) - ), - )?; - } - Ok(()) - } - } - - fn check_yanked_install(&self) -> CargoResult<()> { - if self.ws.ignore_lock() || !self.ws.root().join("Cargo.lock").exists() { - return Ok(()); - } - // It would be best if `source` could be passed in here to avoid a - // duplicate "Updating", but since `source` is taken by value, then it - // wouldn't be available for `compile_ws`. - let (pkg_set, resolve) = ops::resolve_ws(&self.ws)?; - let mut sources = pkg_set.sources_mut(); - - // Checking the yanked status involves taking a look at the registry and - // maybe updating files, so be sure to lock it here. - let _lock = self.ws.config().acquire_package_cache_lock()?; - - for pkg_id in resolve.iter() { - if let Some(source) = sources.get_mut(pkg_id.source_id()) { - if source.is_yanked(pkg_id)? { - self.ws.config().shell().warn(format!( - "package `{}` in Cargo.lock is yanked in registry `{}`, \ - consider running without --locked", - pkg_id, - pkg_id.source_id().display_registry_name() - ))?; - } - } - } - - Ok(()) - } -} - -pub fn install( - config: &Config, - root: Option<&str>, - krates: Vec<&str>, - source_id: SourceId, - from_cwd: bool, - vers: Option<&str>, - opts: &ops::CompileOptions, - force: bool, - no_track: bool, -) -> CargoResult<()> { - let root = resolve_root(root, config)?; - let dst = root.join("bin").into_path_unlocked(); - let map = SourceConfigMap::new(config)?; - - let (installed_anything, scheduled_error) = if krates.len() <= 1 { - let installable_pkg = InstallablePackage::new( - config, - root, - map, - krates.into_iter().next(), - source_id, - from_cwd, - vers, - opts, - force, - no_track, - true, - )?; - if let Some(installable_pkg) = installable_pkg { - installable_pkg.install_one()?; - } - (true, false) - } else { - let mut succeeded = vec![]; - let mut failed = vec![]; - // "Tracks whether or not the source (such as a registry or git repo) has been updated. - // This is used to avoid updating it multiple times when installing multiple crates. 
- let mut did_update = false; - - let pkgs_to_install: Vec<_> = krates - .into_iter() - .filter_map(|krate| { - let root = root.clone(); - let map = map.clone(); - match InstallablePackage::new( - config, - root, - map, - Some(krate), - source_id, - from_cwd, - vers, - opts, - force, - no_track, - !did_update, - ) { - Ok(Some(installable_pkg)) => { - did_update = true; - Some((krate, installable_pkg)) - } - Ok(None) => { - // Already installed - succeeded.push(krate); - None - } - Err(e) => { - crate::display_error(&e, &mut config.shell()); - failed.push(krate); - // We assume an update was performed if we got an error. - did_update = true; - None - } - } - }) - .collect(); - - let install_results: Vec<_> = pkgs_to_install - .into_iter() - .map(|(krate, installable_pkg)| (krate, installable_pkg.install_one())) - .collect(); - - for (krate, result) in install_results { - match result { - Ok(()) => { - succeeded.push(krate); - } - Err(e) => { - crate::display_error(&e, &mut config.shell()); - failed.push(krate); - } - } - } - - let mut summary = vec![]; - if !succeeded.is_empty() { - summary.push(format!("Successfully installed {}!", succeeded.join(", "))); - } - if !failed.is_empty() { - summary.push(format!( - "Failed to install {} (see error(s) above).", - failed.join(", ") - )); - } - if !succeeded.is_empty() || !failed.is_empty() { - config.shell().status("Summary", summary.join(" "))?; - } - - (!succeeded.is_empty(), !failed.is_empty()) - }; - - if installed_anything { - // Print a warning that if this directory isn't in PATH that they won't be - // able to run these commands. - let path = env::var_os("PATH").unwrap_or_default(); - let dst_in_path = env::split_paths(&path).any(|path| path == dst); - - if !dst_in_path { - config.shell().warn(&format!( - "be sure to add `{}` to your PATH to be \ - able to run the installed binaries", - dst.display() - ))?; - } - } - - if scheduled_error { - bail!("some crates failed to install"); - } - - Ok(()) -} - -fn is_installed( - pkg: &Package, - config: &Config, - opts: &ops::CompileOptions, - rustc: &Rustc, - target: &str, - root: &Filesystem, - dst: &Path, - force: bool, -) -> CargoResult { - let tracker = InstallTracker::load(config, root)?; - let (freshness, _duplicates) = - tracker.check_upgrade(dst, pkg, force, opts, target, &rustc.verbose_version)?; - Ok(freshness == Freshness::Fresh) -} - -/// Checks if vers can only be satisfied by exactly one version of a package in a registry, and it's -/// already installed. If this is the case, we can skip interacting with a registry to check if -/// newer versions may be installable, as no newer version can exist. -fn installed_exact_package( - dep: Dependency, - source: &mut T, - config: &Config, - opts: &ops::CompileOptions, - root: &Filesystem, - dst: &Path, - force: bool, -) -> CargoResult> -where - T: Source, -{ - if !dep.version_req().is_exact() { - // If the version isn't exact, we may need to update the registry and look for a newer - // version - we can't know if the package is installed without doing so. - return Ok(None); - } - // Try getting the package from the registry without updating it, to avoid a potentially - // expensive network call in the case that the package is already installed. - // If this fails, the caller will possibly do an index update and try again, this is just a - // best-effort check to see if we can avoid hitting the network. 
- if let Ok(pkg) = select_dep_pkg(source, dep, config, false) { - let (_ws, rustc, target) = - make_ws_rustc_target(config, opts, &source.source_id(), pkg.clone())?; - if let Ok(true) = is_installed(&pkg, config, opts, &rustc, &target, root, dst, force) { - return Ok(Some(pkg)); - } - } - Ok(None) -} - -fn make_ws_rustc_target<'cfg>( - config: &'cfg Config, - opts: &ops::CompileOptions, - source_id: &SourceId, - pkg: Package, -) -> CargoResult<(Workspace<'cfg>, Rustc, String)> { - let mut ws = if source_id.is_git() || source_id.is_path() { - Workspace::new(pkg.manifest_path(), config)? - } else { - Workspace::ephemeral(pkg, config, None, false)? - }; - ws.set_ignore_lock(config.lock_update_allowed()); - ws.set_require_optional_deps(false); - - let rustc = config.load_global_rustc(Some(&ws))?; - let target = match &opts.build_config.single_requested_kind()? { - CompileKind::Host => rustc.host.as_str().to_owned(), - CompileKind::Target(target) => target.short_name().to_owned(), - }; - - Ok((ws, rustc, target)) -} - -/// Parses x.y.z as if it were =x.y.z, and gives CLI-specific error messages in the case of invalid -/// values. -fn parse_semver_flag(v: &str) -> CargoResult { - // If the version begins with character <, >, =, ^, ~ parse it as a - // version range, otherwise parse it as a specific version - let first = v - .chars() - .next() - .ok_or_else(|| format_err!("no version provided for the `--vers` flag"))?; - - let is_req = "<>=^~".contains(first) || v.contains('*'); - if is_req { - match v.parse::() { - Ok(v) => Ok(v), - Err(_) => bail!( - "the `--vers` provided, `{}`, is \ - not a valid semver version requirement\n\n\ - Please have a look at \ - https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html \ - for the correct format", - v - ), - } - } else { - match v.to_semver() { - Ok(v) => Ok(VersionReq::exact(&v)), - Err(e) => { - let mut msg = format!( - "the `--vers` provided, `{}`, is \ - not a valid semver version: {}\n", - v, e - ); - - // If it is not a valid version but it is a valid version - // requirement, add a note to the warning - if v.parse::().is_ok() { - msg.push_str(&format!( - "\nif you want to specify semver range, \ - add an explicit qualifier, like ^{}", - v - )); - } - bail!(msg); - } - } - } -} - -/// Display a list of installed binaries. -pub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> { - let root = resolve_root(dst, config)?; - let tracker = InstallTracker::load(config, &root)?; - for (k, v) in tracker.all_installed_bins() { - drop_println!(config, "{}:", k); - for bin in v { - drop_println!(config, " {}", bin); - } - } - Ok(()) -} - -/// Removes executables that are no longer part of a package that was -/// previously installed. -fn remove_orphaned_bins( - ws: &Workspace<'_>, - tracker: &mut InstallTracker, - duplicates: &BTreeMap>, - pkg: &Package, - dst: &Path, -) -> CargoResult<()> { - let filter = ops::CompileFilter::new_all_targets(); - let all_self_names = exe_names(pkg, &filter); - let mut to_remove: HashMap> = HashMap::new(); - // For each package that we stomped on. - for other_pkg in duplicates.values().flatten() { - // Only for packages with the same name. - if other_pkg.name() == pkg.name() { - // Check what the old package had installed. - if let Some(installed) = tracker.installed_bins(*other_pkg) { - // If the old install has any names that no longer exist, - // add them to the list to remove. 
- for installed_name in installed { - if !all_self_names.contains(installed_name.as_str()) { - to_remove - .entry(*other_pkg) - .or_default() - .insert(installed_name.clone()); - } - } - } - } - } - - for (old_pkg, bins) in to_remove { - tracker.remove(old_pkg, &bins); - for bin in bins { - let full_path = dst.join(bin); - if full_path.exists() { - ws.config().shell().status( - "Removing", - format!( - "executable `{}` from previous version {}", - full_path.display(), - old_pkg - ), - )?; - paths::remove_file(&full_path) - .with_context(|| format!("failed to remove {:?}", full_path))?; - } - } - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_new.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_new.rs deleted file mode 100644 index 201142809..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_new.rs +++ /dev/null @@ -1,847 +0,0 @@ -use crate::core::{Edition, Shell, Workspace}; -use crate::util::errors::CargoResult; -use crate::util::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; -use crate::util::{restricted_names, Config}; -use anyhow::Context as _; -use cargo_util::paths; -use serde::de; -use serde::Deserialize; -use std::collections::BTreeMap; -use std::fmt; -use std::io::{BufRead, BufReader, ErrorKind}; -use std::path::{Path, PathBuf}; -use std::process::Command; -use std::str::{from_utf8, FromStr}; - -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum VersionControl { - Git, - Hg, - Pijul, - Fossil, - NoVcs, -} - -impl FromStr for VersionControl { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result { - match s { - "git" => Ok(VersionControl::Git), - "hg" => Ok(VersionControl::Hg), - "pijul" => Ok(VersionControl::Pijul), - "fossil" => Ok(VersionControl::Fossil), - "none" => Ok(VersionControl::NoVcs), - other => anyhow::bail!("unknown vcs specification: `{}`", other), - } - } -} - -impl<'de> de::Deserialize<'de> for VersionControl { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - let s = String::deserialize(deserializer)?; - FromStr::from_str(&s).map_err(de::Error::custom) - } -} - -#[derive(Debug)] -pub struct NewOptions { - pub version_control: Option, - pub kind: NewProjectKind, - pub auto_detect_kind: bool, - /// Absolute path to the directory for the new package - pub path: PathBuf, - pub name: Option, - pub edition: Option, - pub registry: Option, -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub enum NewProjectKind { - Bin, - Lib, -} - -impl NewProjectKind { - fn is_bin(self) -> bool { - self == NewProjectKind::Bin - } -} - -impl fmt::Display for NewProjectKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - NewProjectKind::Bin => "binary (application)", - NewProjectKind::Lib => "library", - } - .fmt(f) - } -} - -struct SourceFileInformation { - relative_path: String, - target_name: String, - bin: bool, -} - -struct MkOptions<'a> { - version_control: Option, - path: &'a Path, - name: &'a str, - source_files: Vec, - bin: bool, - edition: Option<&'a str>, - registry: Option<&'a str>, -} - -impl NewOptions { - pub fn new( - version_control: Option, - bin: bool, - lib: bool, - path: PathBuf, - name: Option, - edition: Option, - registry: Option, - ) -> CargoResult { - let auto_detect_kind = !bin && !lib; - - let kind = match (bin, lib) { - (true, true) => anyhow::bail!("can't specify both lib and binary outputs"), - (false, true) => NewProjectKind::Lib, - (_, false) => NewProjectKind::Bin, - }; - - 
let opts = NewOptions { - version_control, - kind, - auto_detect_kind, - path, - name, - edition, - registry, - }; - Ok(opts) - } -} - -#[derive(Deserialize)] -struct CargoNewConfig { - #[deprecated = "cargo-new no longer supports adding the authors field"] - #[allow(dead_code)] - name: Option, - - #[deprecated = "cargo-new no longer supports adding the authors field"] - #[allow(dead_code)] - email: Option, - - #[serde(rename = "vcs")] - version_control: Option, -} - -fn get_name<'a>(path: &'a Path, opts: &'a NewOptions) -> CargoResult<&'a str> { - if let Some(ref name) = opts.name { - return Ok(name); - } - - let file_name = path.file_name().ok_or_else(|| { - anyhow::format_err!( - "cannot auto-detect package name from path {:?} ; use --name to override", - path.as_os_str() - ) - })?; - - file_name.to_str().ok_or_else(|| { - anyhow::format_err!( - "cannot create package with a non-unicode name: {:?}", - file_name - ) - }) -} - -fn check_name( - name: &str, - show_name_help: bool, - has_bin: bool, - shell: &mut Shell, -) -> CargoResult<()> { - // If --name is already used to override, no point in suggesting it - // again as a fix. - let name_help = if show_name_help { - "\nIf you need a package name to not match the directory name, consider using --name flag." - } else { - "" - }; - let bin_help = || { - let mut help = String::from(name_help); - if has_bin { - help.push_str(&format!( - "\n\ - If you need a binary with the name \"{name}\", use a valid package \ - name, and set the binary name to be different from the package. \ - This can be done by setting the binary filename to `src/bin/{name}.rs` \ - or change the name in Cargo.toml with:\n\ - \n \ - [[bin]]\n \ - name = \"{name}\"\n \ - path = \"src/main.rs\"\n\ - ", - name = name - )); - } - help - }; - restricted_names::validate_package_name(name, "package name", &bin_help())?; - - if restricted_names::is_keyword(name) { - anyhow::bail!( - "the name `{}` cannot be used as a package name, it is a Rust keyword{}", - name, - bin_help() - ); - } - if restricted_names::is_conflicting_artifact_name(name) { - if has_bin { - anyhow::bail!( - "the name `{}` cannot be used as a package name, \ - it conflicts with cargo's build directory names{}", - name, - name_help - ); - } else { - shell.warn(format!( - "the name `{}` will not support binary \ - executables with that name, \ - it conflicts with cargo's build directory names", - name - ))?; - } - } - if name == "test" { - anyhow::bail!( - "the name `test` cannot be used as a package name, \ - it conflicts with Rust's built-in test library{}", - bin_help() - ); - } - if ["core", "std", "alloc", "proc_macro", "proc-macro"].contains(&name) { - shell.warn(format!( - "the name `{}` is part of Rust's standard library\n\ - It is recommended to use a different name to avoid problems.{}", - name, - bin_help() - ))?; - } - if restricted_names::is_windows_reserved(name) { - if cfg!(windows) { - anyhow::bail!( - "cannot use name `{}`, it is a reserved Windows filename{}", - name, - name_help - ); - } else { - shell.warn(format!( - "the name `{}` is a reserved Windows filename\n\ - This package will not work on Windows platforms.", - name - ))?; - } - } - if restricted_names::is_non_ascii_name(name) { - shell.warn(format!( - "the name `{}` contains non-ASCII characters\n\ - Support for non-ASCII crate names is experimental and only valid \ - on the nightly toolchain.", - name - ))?; - } - - Ok(()) -} - -fn detect_source_paths_and_types( - package_path: &Path, - package_name: &str, - detected_files: &mut 
Vec, -) -> CargoResult<()> { - let path = package_path; - let name = package_name; - - enum H { - Bin, - Lib, - Detect, - } - - struct Test { - proposed_path: String, - handling: H, - } - - let tests = vec![ - Test { - proposed_path: "src/main.rs".to_string(), - handling: H::Bin, - }, - Test { - proposed_path: "main.rs".to_string(), - handling: H::Bin, - }, - Test { - proposed_path: format!("src/{}.rs", name), - handling: H::Detect, - }, - Test { - proposed_path: format!("{}.rs", name), - handling: H::Detect, - }, - Test { - proposed_path: "src/lib.rs".to_string(), - handling: H::Lib, - }, - Test { - proposed_path: "lib.rs".to_string(), - handling: H::Lib, - }, - ]; - - for i in tests { - let pp = i.proposed_path; - - // path/pp does not exist or is not a file - if !path.join(&pp).is_file() { - continue; - } - - let sfi = match i.handling { - H::Bin => SourceFileInformation { - relative_path: pp, - target_name: package_name.to_string(), - bin: true, - }, - H::Lib => SourceFileInformation { - relative_path: pp, - target_name: package_name.to_string(), - bin: false, - }, - H::Detect => { - let content = paths::read(&path.join(pp.clone()))?; - let isbin = content.contains("fn main"); - SourceFileInformation { - relative_path: pp, - target_name: package_name.to_string(), - bin: isbin, - } - } - }; - detected_files.push(sfi); - } - - // Check for duplicate lib attempt - - let mut previous_lib_relpath: Option<&str> = None; - let mut duplicates_checker: BTreeMap<&str, &SourceFileInformation> = BTreeMap::new(); - - for i in detected_files { - if i.bin { - if let Some(x) = BTreeMap::get::(&duplicates_checker, i.target_name.as_ref()) { - anyhow::bail!( - "\ -multiple possible binary sources found: - {} - {} -cannot automatically generate Cargo.toml as the main target would be ambiguous", - &x.relative_path, - &i.relative_path - ); - } - duplicates_checker.insert(i.target_name.as_ref(), i); - } else { - if let Some(plp) = previous_lib_relpath { - anyhow::bail!( - "cannot have a package with \ - multiple libraries, \ - found both `{}` and `{}`", - plp, - i.relative_path - ) - } - previous_lib_relpath = Some(&i.relative_path); - } - } - - Ok(()) -} - -fn plan_new_source_file(bin: bool, package_name: String) -> SourceFileInformation { - if bin { - SourceFileInformation { - relative_path: "src/main.rs".to_string(), - target_name: package_name, - bin: true, - } - } else { - SourceFileInformation { - relative_path: "src/lib.rs".to_string(), - target_name: package_name, - bin: false, - } - } -} - -fn calculate_new_project_kind( - requested_kind: NewProjectKind, - auto_detect_kind: bool, - found_files: &Vec, -) -> NewProjectKind { - let bin_file = found_files.iter().find(|x| x.bin); - - let kind_from_files = if !found_files.is_empty() && bin_file.is_none() { - NewProjectKind::Lib - } else { - NewProjectKind::Bin - }; - - if auto_detect_kind { - return kind_from_files; - } - - requested_kind -} - -pub fn new(opts: &NewOptions, config: &Config) -> CargoResult<()> { - let path = &opts.path; - if path.exists() { - anyhow::bail!( - "destination `{}` already exists\n\n\ - Use `cargo init` to initialize the directory", - path.display() - ) - } - - let is_bin = opts.kind.is_bin(); - - let name = get_name(path, opts)?; - check_name(name, opts.name.is_none(), is_bin, &mut config.shell())?; - - let mkopts = MkOptions { - version_control: opts.version_control, - path, - name, - source_files: vec![plan_new_source_file(opts.kind.is_bin(), name.to_string())], - bin: is_bin, - edition: opts.edition.as_deref(), - registry: 
opts.registry.as_deref(), - }; - - mk(config, &mkopts).with_context(|| { - format!( - "Failed to create package `{}` at `{}`", - name, - path.display() - ) - })?; - Ok(()) -} - -pub fn init(opts: &NewOptions, config: &Config) -> CargoResult { - // This is here just as a random location to exercise the internal error handling. - if std::env::var_os("__CARGO_TEST_INTERNAL_ERROR").is_some() { - return Err(crate::util::internal("internal error test")); - } - - let path = &opts.path; - - if path.join("Cargo.toml").exists() { - anyhow::bail!("`cargo init` cannot be run on existing Cargo packages") - } - - let name = get_name(path, opts)?; - - let mut src_paths_types = vec![]; - - detect_source_paths_and_types(path, name, &mut src_paths_types)?; - - let kind = calculate_new_project_kind(opts.kind, opts.auto_detect_kind, &src_paths_types); - let has_bin = kind.is_bin(); - - if src_paths_types.is_empty() { - src_paths_types.push(plan_new_source_file(has_bin, name.to_string())); - } else if src_paths_types.len() == 1 && !src_paths_types.iter().any(|x| x.bin == has_bin) { - // we've found the only file and it's not the type user wants. Change the type and warn - let file_type = if src_paths_types[0].bin { - NewProjectKind::Bin - } else { - NewProjectKind::Lib - }; - config.shell().warn(format!( - "file `{}` seems to be a {} file", - src_paths_types[0].relative_path, file_type - ))?; - src_paths_types[0].bin = has_bin - } else if src_paths_types.len() > 1 && !has_bin { - // We have found both lib and bin files and the user would like us to treat both as libs - anyhow::bail!( - "cannot have a package with \ - multiple libraries, \ - found both `{}` and `{}`", - src_paths_types[0].relative_path, - src_paths_types[1].relative_path - ) - } - - check_name(name, opts.name.is_none(), has_bin, &mut config.shell())?; - - let mut version_control = opts.version_control; - - if version_control == None { - let mut num_detected_vsces = 0; - - if path.join(".git").exists() { - version_control = Some(VersionControl::Git); - num_detected_vsces += 1; - } - - if path.join(".hg").exists() { - version_control = Some(VersionControl::Hg); - num_detected_vsces += 1; - } - - if path.join(".pijul").exists() { - version_control = Some(VersionControl::Pijul); - num_detected_vsces += 1; - } - - if path.join(".fossil").exists() { - version_control = Some(VersionControl::Fossil); - num_detected_vsces += 1; - } - - // if none exists, maybe create git, like in `cargo new` - - if num_detected_vsces > 1 { - anyhow::bail!( - "more than one of .hg, .git, .pijul, .fossil configurations \ - found and the ignore file can't be filled in as \ - a result. specify --vcs to override detection" - ); - } - } - - let mkopts = MkOptions { - version_control, - path, - name, - bin: has_bin, - source_files: src_paths_types, - edition: opts.edition.as_deref(), - registry: opts.registry.as_deref(), - }; - - mk(config, &mkopts).with_context(|| { - format!( - "Failed to create package `{}` at `{}`", - name, - path.display() - ) - })?; - Ok(kind) -} - -/// IgnoreList -struct IgnoreList { - /// git like formatted entries - ignore: Vec, - /// mercurial formatted entries - hg_ignore: Vec, - /// Fossil-formatted entries. - fossil_ignore: Vec, -} - -impl IgnoreList { - /// constructor to build a new ignore file - fn new() -> IgnoreList { - IgnoreList { - ignore: Vec::new(), - hg_ignore: Vec::new(), - fossil_ignore: Vec::new(), - } - } - - /// Add a new entry to the ignore list. Requires three arguments with the - /// entry in possibly three different formats. 
One for "git style" entries, - /// one for "mercurial style" entries and one for "fossil style" entries. - fn push(&mut self, ignore: &str, hg_ignore: &str, fossil_ignore: &str) { - self.ignore.push(ignore.to_string()); - self.hg_ignore.push(hg_ignore.to_string()); - self.fossil_ignore.push(fossil_ignore.to_string()); - } - - /// Return the correctly formatted content of the ignore file for the given - /// version control system as `String`. - fn format_new(&self, vcs: VersionControl) -> String { - let ignore_items = match vcs { - VersionControl::Hg => &self.hg_ignore, - VersionControl::Fossil => &self.fossil_ignore, - _ => &self.ignore, - }; - - ignore_items.join("\n") + "\n" - } - - /// format_existing is used to format the IgnoreList when the ignore file - /// already exists. It reads the contents of the given `BufRead` and - /// checks if the contents of the ignore list are already existing in the - /// file. - fn format_existing(&self, existing: T, vcs: VersionControl) -> String { - // TODO: is unwrap safe? - let existing_items = existing.lines().collect::, _>>().unwrap(); - - let ignore_items = match vcs { - VersionControl::Hg => &self.hg_ignore, - VersionControl::Fossil => &self.fossil_ignore, - _ => &self.ignore, - }; - - let mut out = String::new(); - - // Fossil does not support `#` comments. - if vcs != VersionControl::Fossil { - out.push_str("\n\n# Added by cargo\n"); - if ignore_items - .iter() - .any(|item| existing_items.contains(item)) - { - out.push_str("#\n# already existing elements were commented out\n"); - } - out.push('\n'); - } - - for item in ignore_items { - if existing_items.contains(item) { - if vcs == VersionControl::Fossil { - // Just merge for Fossil. - continue; - } - out.push('#'); - } - out.push_str(item); - out.push('\n'); - } - - out - } -} - -/// Writes the ignore file to the given directory. If the ignore file for the -/// given vcs system already exists, its content is read and duplicate ignore -/// file entries are filtered out. -fn write_ignore_file(base_path: &Path, list: &IgnoreList, vcs: VersionControl) -> CargoResult<()> { - // Fossil only supports project-level settings in a dedicated subdirectory. - if vcs == VersionControl::Fossil { - paths::create_dir_all(base_path.join(".fossil-settings"))?; - } - - for fp_ignore in match vcs { - VersionControl::Git => vec![base_path.join(".gitignore")], - VersionControl::Hg => vec![base_path.join(".hgignore")], - VersionControl::Pijul => vec![base_path.join(".ignore")], - // Fossil has a cleaning functionality configured in a separate file. - VersionControl::Fossil => vec![ - base_path.join(".fossil-settings/ignore-glob"), - base_path.join(".fossil-settings/clean-glob"), - ], - VersionControl::NoVcs => return Ok(()), - } { - let ignore: String = match paths::open(&fp_ignore) { - Err(err) => match err.downcast_ref::() { - Some(io_err) if io_err.kind() == ErrorKind::NotFound => list.format_new(vcs), - _ => return Err(err), - }, - Ok(file) => list.format_existing(BufReader::new(file), vcs), - }; - - paths::append(&fp_ignore, ignore.as_bytes())?; - } - - Ok(()) -} - -/// Initializes the correct VCS system based on the provided config. -fn init_vcs(path: &Path, vcs: VersionControl, config: &Config) -> CargoResult<()> { - match vcs { - VersionControl::Git => { - if !path.join(".git").exists() { - // Temporary fix to work around bug in libgit2 when creating a - // directory in the root of a posix filesystem. 
- // See: https://github.com/libgit2/libgit2/issues/5130 - paths::create_dir_all(path)?; - GitRepo::init(path, config.cwd())?; - } - } - VersionControl::Hg => { - if !path.join(".hg").exists() { - HgRepo::init(path, config.cwd())?; - } - } - VersionControl::Pijul => { - if !path.join(".pijul").exists() { - PijulRepo::init(path, config.cwd())?; - } - } - VersionControl::Fossil => { - if !path.join(".fossil").exists() { - FossilRepo::init(path, config.cwd())?; - } - } - VersionControl::NoVcs => { - paths::create_dir_all(path)?; - } - }; - - Ok(()) -} - -fn mk(config: &Config, opts: &MkOptions<'_>) -> CargoResult<()> { - let path = opts.path; - let name = opts.name; - let cfg = config.get::("cargo-new")?; - - // Using the push method with multiple arguments ensures that the entries - // for all mutually-incompatible VCS in terms of syntax are in sync. - let mut ignore = IgnoreList::new(); - ignore.push("/target", "^target/", "target"); - if !opts.bin { - ignore.push("Cargo.lock", "glob:Cargo.lock", "Cargo.lock,*/Cargo.lock"); - } - - let vcs = opts.version_control.unwrap_or_else(|| { - let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd()); - match (cfg.version_control, in_existing_vcs) { - (None, false) => VersionControl::Git, - (Some(opt), false) => opt, - (_, true) => VersionControl::NoVcs, - } - }); - - init_vcs(path, vcs, config)?; - write_ignore_file(path, &ignore, vcs)?; - - let mut cargotoml_path_specifier = String::new(); - - // Calculate what `[lib]` and `[[bin]]`s we need to append to `Cargo.toml`. - - for i in &opts.source_files { - if i.bin { - if i.relative_path != "src/main.rs" { - cargotoml_path_specifier.push_str(&format!( - r#" -[[bin]] -name = "{}" -path = {} -"#, - i.target_name, - toml::Value::String(i.relative_path.clone()) - )); - } - } else if i.relative_path != "src/lib.rs" { - cargotoml_path_specifier.push_str(&format!( - r#" -[lib] -name = "{}" -path = {} -"#, - i.target_name, - toml::Value::String(i.relative_path.clone()) - )); - } - } - - // Create `Cargo.toml` file with necessary `[lib]` and `[[bin]]` sections, if needed. - - paths::write( - &path.join("Cargo.toml"), - format!( - r#"[package] -name = "{}" -version = "0.1.0" -edition = {} -{} -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -{}"#, - name, - match opts.edition { - Some(edition) => toml::Value::String(edition.to_string()), - None => toml::Value::String(Edition::LATEST_STABLE.to_string()), - }, - match opts.registry { - Some(registry) => format!( - "publish = {}\n", - toml::Value::Array(vec!(toml::Value::String(registry.to_string()))) - ), - None => "".to_string(), - }, - cargotoml_path_specifier - ) - .as_bytes(), - )?; - - // Create all specified source files (with respective parent directories) if they don't exist. 
- - for i in &opts.source_files { - let path_of_source_file = path.join(i.relative_path.clone()); - - if let Some(src_dir) = path_of_source_file.parent() { - paths::create_dir_all(src_dir)?; - } - - let default_file_content: &[u8] = if i.bin { - b"\ -fn main() { - println!(\"Hello, world!\"); -} -" - } else { - b"\ -#[cfg(test)] -mod tests { - #[test] - fn it_works() { - let result = 2 + 2; - assert_eq!(result, 4); - } -} -" - }; - - if !path_of_source_file.is_file() { - paths::write(&path_of_source_file, default_file_content)?; - - // Format the newly created source file - match Command::new("rustfmt").arg(&path_of_source_file).output() { - Err(e) => log::warn!("failed to call rustfmt: {}", e), - Ok(output) => { - if !output.status.success() { - log::warn!("rustfmt failed: {:?}", from_utf8(&output.stdout)); - } - } - }; - } - } - - if let Err(e) = Workspace::new(&path.join("Cargo.toml"), config) { - crate::display_warning_with_error( - "compiling this new package may not work due to invalid \ - workspace configuration", - &e, - &mut config.shell(), - ); - } - - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_output_metadata.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_output_metadata.rs deleted file mode 100644 index b2e100f6c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_output_metadata.rs +++ /dev/null @@ -1,242 +0,0 @@ -use crate::core::compiler::{CompileKind, RustcTargetData}; -use crate::core::dependency::DepKind; -use crate::core::package::SerializedPackage; -use crate::core::resolver::{features::CliFeatures, HasDevUnits, Resolve}; -use crate::core::{Dependency, Package, PackageId, Workspace}; -use crate::ops::{self, Packages}; -use crate::util::interning::InternedString; -use crate::util::CargoResult; -use cargo_platform::Platform; -use serde::Serialize; -use std::collections::BTreeMap; -use std::path::PathBuf; - -const VERSION: u32 = 1; - -pub struct OutputMetadataOptions { - pub cli_features: CliFeatures, - pub no_deps: bool, - pub version: u32, - pub filter_platforms: Vec, -} - -/// Loads the manifest, resolves the dependencies of the package to the concrete -/// used versions - considering overrides - and writes all dependencies in a JSON -/// format to stdout. -pub fn output_metadata(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> CargoResult { - if opt.version != VERSION { - anyhow::bail!( - "metadata version {} not supported, only {} is currently supported", - opt.version, - VERSION - ); - } - let config = ws.config(); - let (packages, resolve) = if opt.no_deps { - let packages = ws.members().map(|pkg| pkg.serialized(config)).collect(); - (packages, None) - } else { - let (packages, resolve) = build_resolve_graph(ws, opt)?; - (packages, Some(resolve)) - }; - - Ok(ExportInfo { - packages, - workspace_members: ws.members().map(|pkg| pkg.package_id()).collect(), - resolve, - target_directory: ws.target_dir().into_path_unlocked(), - version: VERSION, - workspace_root: ws.root().to_path_buf(), - metadata: ws.custom_metadata().cloned(), - }) -} - -/// This is the structure that is serialized and displayed to the user. -/// -/// See cargo-metadata.adoc for detailed documentation of the format. 
-#[derive(Serialize)]
-pub struct ExportInfo {
-    packages: Vec<SerializedPackage>,
-    workspace_members: Vec<PackageId>,
-    resolve: Option<MetadataResolve>,
-    target_directory: PathBuf,
-    version: u32,
-    workspace_root: PathBuf,
-    metadata: Option<toml::Value>,
-}
-
-#[derive(Serialize)]
-struct MetadataResolve {
-    nodes: Vec<MetadataResolveNode>,
-    root: Option<PackageId>,
-}
-
-#[derive(Serialize)]
-struct MetadataResolveNode {
-    id: PackageId,
-    dependencies: Vec<PackageId>,
-    deps: Vec<Dep>,
-    features: Vec<InternedString>,
-}
-
-#[derive(Serialize)]
-struct Dep {
-    name: String,
-    pkg: PackageId,
-    dep_kinds: Vec<DepKindInfo>,
-}
-
-#[derive(Serialize, PartialEq, Eq, PartialOrd, Ord)]
-struct DepKindInfo {
-    kind: DepKind,
-    target: Option<Platform>,
-}
-
-impl From<&Dependency> for DepKindInfo {
-    fn from(dep: &Dependency) -> DepKindInfo {
-        DepKindInfo {
-            kind: dep.kind(),
-            target: dep.platform().cloned(),
-        }
-    }
-}
-
-/// Builds the resolve graph as it will be displayed to the user.
-fn build_resolve_graph(
-    ws: &Workspace<'_>,
-    metadata_opts: &OutputMetadataOptions,
-) -> CargoResult<(Vec<SerializedPackage>, MetadataResolve)> {
-    // TODO: Without --filter-platform, features are being resolved for `host` only.
-    // How should this work?
-    let requested_kinds =
-        CompileKind::from_requested_targets(ws.config(), &metadata_opts.filter_platforms)?;
-    let target_data = RustcTargetData::new(ws, &requested_kinds)?;
-    // Resolve entire workspace.
-    let specs = Packages::All.to_package_id_specs(ws)?;
-    let force_all = if metadata_opts.filter_platforms.is_empty() {
-        crate::core::resolver::features::ForceAllTargets::Yes
-    } else {
-        crate::core::resolver::features::ForceAllTargets::No
-    };
-
-    // Note that even with --filter-platform we end up downloading host dependencies as well,
-    // as that is the behavior of download_accessible.
-    let ws_resolve = ops::resolve_ws_with_opts(
-        ws,
-        &target_data,
-        &requested_kinds,
-        &metadata_opts.cli_features,
-        &specs,
-        HasDevUnits::Yes,
-        force_all,
-    )?;
-
-    let package_map: BTreeMap<PackageId, Package> = ws_resolve
-        .pkg_set
-        .packages()
-        // This is a little lazy, but serde doesn't handle Rc fields very well.
-        .map(|pkg| (pkg.package_id(), Package::clone(pkg)))
-        .collect();
-
-    // Start from the workspace roots, and recurse through filling out the
-    // map, filtering targets as necessary.
-    let mut node_map = BTreeMap::new();
-    for member_pkg in ws.members() {
-        build_resolve_graph_r(
-            &mut node_map,
-            member_pkg.package_id(),
-            &ws_resolve.targeted_resolve,
-            &package_map,
-            &target_data,
-            &requested_kinds,
-        );
-    }
-    // Get a Vec of Packages.
-    let config = ws.config();
-    let actual_packages = package_map
-        .into_iter()
-        .filter_map(|(pkg_id, pkg)| node_map.get(&pkg_id).map(|_| pkg))
-        .map(|pkg| pkg.serialized(config))
-        .collect();
-
-    let mr = MetadataResolve {
-        nodes: node_map.into_iter().map(|(_pkg_id, node)| node).collect(),
-        root: ws.current_opt().map(|pkg| pkg.package_id()),
-    };
-    Ok((actual_packages, mr))
-}
-
-fn build_resolve_graph_r(
-    node_map: &mut BTreeMap<PackageId, MetadataResolveNode>,
-    pkg_id: PackageId,
-    resolve: &Resolve,
-    package_map: &BTreeMap<PackageId, Package>,
-    target_data: &RustcTargetData<'_>,
-    requested_kinds: &[CompileKind],
-) {
-    if node_map.contains_key(&pkg_id) {
-        return;
-    }
-    // This normalizes the IDs so that they are consistent between the
-    // `packages` array and the `resolve` map. This is a bit of a hack to
-    // compensate for the fact that
-    // SourceKind::Git(GitReference::Branch("master")) is the same as
-    // SourceKind::Git(GitReference::DefaultBranch).
We want IDs in the JSON - // to be opaque, and compare with basic string equality, so this will - // always prefer the style of ID in the Package instead of the resolver. - // Cargo generally only exposes PackageIds from the Package struct, and - // AFAIK this is the only place where the resolver variant is exposed. - // - // This diverges because the SourceIds created for Packages are built - // based on the Dependency declaration, but the SourceIds in the resolver - // are deserialized from Cargo.lock. Cargo.lock may have been generated by - // an older (or newer!) version of Cargo which uses a different style. - let normalize_id = |id| -> PackageId { *package_map.get_key_value(&id).unwrap().0 }; - let features = resolve.features(pkg_id).to_vec(); - - let deps: Vec = resolve - .deps(pkg_id) - .filter(|(_dep_id, deps)| { - if requested_kinds == [CompileKind::Host] { - true - } else { - requested_kinds.iter().any(|kind| { - deps.iter() - .any(|dep| target_data.dep_platform_activated(dep, *kind)) - }) - } - }) - .filter_map(|(dep_id, deps)| { - let mut dep_kinds: Vec<_> = deps.iter().map(DepKindInfo::from).collect(); - dep_kinds.sort(); - package_map - .get(&dep_id) - .and_then(|pkg| pkg.targets().iter().find(|t| t.is_lib())) - .and_then(|lib_target| resolve.extern_crate_name(pkg_id, dep_id, lib_target).ok()) - .map(|name| Dep { - name, - pkg: normalize_id(dep_id), - dep_kinds, - }) - }) - .collect(); - let dumb_deps: Vec = deps.iter().map(|dep| normalize_id(dep.pkg)).collect(); - let to_visit = dumb_deps.clone(); - let node = MetadataResolveNode { - id: normalize_id(pkg_id), - dependencies: dumb_deps, - deps, - features, - }; - node_map.insert(pkg_id, node); - for dep_id in to_visit { - build_resolve_graph_r( - node_map, - dep_id, - resolve, - package_map, - target_data, - requested_kinds, - ); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_package.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_package.rs deleted file mode 100644 index ed264079b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_package.rs +++ /dev/null @@ -1,885 +0,0 @@ -use std::collections::{BTreeSet, HashMap}; -use std::fs::{self, File}; -use std::io::prelude::*; -use std::io::SeekFrom; -use std::path::{Path, PathBuf}; -use std::rc::Rc; -use std::sync::Arc; - -use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor}; -use crate::core::resolver::CliFeatures; -use crate::core::{Feature, Shell, Verbosity, Workspace}; -use crate::core::{Package, PackageId, PackageSet, Resolve, Source, SourceId}; -use crate::sources::PathSource; -use crate::util::errors::CargoResult; -use crate::util::toml::TomlManifest; -use crate::util::{self, restricted_names, Config, FileLock}; -use crate::{drop_println, ops}; -use anyhow::Context as _; -use cargo_util::paths; -use flate2::read::GzDecoder; -use flate2::{Compression, GzBuilder}; -use log::debug; -use serde::Serialize; -use tar::{Archive, Builder, EntryType, Header, HeaderMode}; - -pub struct PackageOpts<'cfg> { - pub config: &'cfg Config, - pub list: bool, - pub check_metadata: bool, - pub allow_dirty: bool, - pub verify: bool, - pub jobs: Option, - pub to_package: ops::Packages, - pub targets: Vec, - pub cli_features: CliFeatures, -} - -const VCS_INFO_FILE: &str = ".cargo_vcs_info.json"; - -struct ArchiveFile { - /// The relative path in the archive (not including the top-level package - /// name directory). - rel_path: PathBuf, - /// String variant of `rel_path`, for convenience. 
- rel_str: String, - /// The contents to add to the archive. - contents: FileContents, -} - -enum FileContents { - /// Absolute path to the file on disk to add to the archive. - OnDisk(PathBuf), - /// Generates a file. - Generated(GeneratedFile), -} - -enum GeneratedFile { - /// Generates `Cargo.toml` by rewriting the original. - Manifest, - /// Generates `Cargo.lock` in some cases (like if there is a binary). - Lockfile, - /// Adds a `.cargo_vcs_info.json` file if in a (clean) git repo. - VcsInfo(VcsInfo), -} - -#[derive(Serialize)] -struct VcsInfo { - git: GitVcsInfo, - /// Path to the package within repo (empty string if root). / not \ - path_in_vcs: String, -} - -#[derive(Serialize)] -struct GitVcsInfo { - sha1: String, -} - -pub fn package_one( - ws: &Workspace<'_>, - pkg: &Package, - opts: &PackageOpts<'_>, -) -> CargoResult> { - let config = ws.config(); - let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), config); - src.update()?; - - if opts.check_metadata { - check_metadata(pkg, config)?; - } - - if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() { - config.shell().warn( - "both package.include and package.exclude are specified; \ - the exclude list will be ignored", - )?; - } - let src_files = src.list_files(pkg)?; - - // Check (git) repository state, getting the current commit hash if not - // dirty. - let vcs_info = if !opts.allow_dirty { - // This will error if a dirty repo is found. - check_repo_state(pkg, &src_files, config)? - } else { - None - }; - - let ar_files = build_ar_list(ws, pkg, src_files, vcs_info)?; - - if opts.list { - for ar_file in ar_files { - drop_println!(config, "{}", ar_file.rel_str); - } - - return Ok(None); - } - - // Check that the package dependencies are safe to deploy. - for dep in pkg.dependencies() { - super::check_dep_has_version(dep, false)?; - } - - let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); - let dir = ws.target_dir().join("package"); - let mut dst = { - let tmp = format!(".{}", filename); - dir.open_rw(&tmp, config, "package scratch space")? - }; - - // Package up and test a temporary tarball and only move it to the final - // location if it actually passes all our tests. Any previously existing - // tarball can be assumed as corrupt or invalid, so we just blow it away if - // it exists. - config - .shell() - .status("Packaging", pkg.package_id().to_string())?; - dst.file().set_len(0)?; - tar(ws, pkg, ar_files, dst.file(), &filename) - .with_context(|| "failed to prepare local package for uploading")?; - if opts.verify { - dst.seek(SeekFrom::Start(0))?; - run_verify(ws, pkg, &dst, opts).with_context(|| "failed to verify package tarball")? - } - - dst.seek(SeekFrom::Start(0))?; - let src_path = dst.path(); - let dst_path = dst.parent().join(&filename); - fs::rename(&src_path, &dst_path) - .with_context(|| "failed to move temporary tarball into final location")?; - - return Ok(Some(dst)); -} - -pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult>> { - let pkgs = ws.members_with_features( - &opts.to_package.to_package_id_specs(ws)?, - &opts.cli_features, - )?; - - let mut dsts = Vec::with_capacity(pkgs.len()); - - if ws.root().join("Cargo.lock").exists() { - // Make sure the Cargo.lock is up-to-date and valid. - let _ = ops::resolve_ws(ws)?; - // If Cargo.lock does not exist, it will be generated by `build_lock` - // below, and will be validated during the verification step. 
- } - - for (pkg, cli_features) in pkgs { - let result = package_one( - ws, - pkg, - &PackageOpts { - config: opts.config, - list: opts.list, - check_metadata: opts.check_metadata, - allow_dirty: opts.allow_dirty, - verify: opts.verify, - jobs: opts.jobs, - to_package: ops::Packages::Default, - targets: opts.targets.clone(), - cli_features: cli_features, - }, - )?; - - if !opts.list { - dsts.push(result.unwrap()); - } - } - - if opts.list { - // We're just listing, so there's no file output - Ok(None) - } else { - Ok(Some(dsts)) - } -} - -/// Builds list of files to archive. -fn build_ar_list( - ws: &Workspace<'_>, - pkg: &Package, - src_files: Vec, - vcs_info: Option, -) -> CargoResult> { - let mut result = Vec::new(); - let root = pkg.root(); - for src_file in src_files { - let rel_path = src_file.strip_prefix(&root)?.to_path_buf(); - check_filename(&rel_path, &mut ws.config().shell())?; - let rel_str = rel_path - .to_str() - .ok_or_else(|| { - anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display()) - })? - .to_string(); - match rel_str.as_ref() { - "Cargo.toml" => { - result.push(ArchiveFile { - rel_path: PathBuf::from("Cargo.toml.orig"), - rel_str: "Cargo.toml.orig".to_string(), - contents: FileContents::OnDisk(src_file), - }); - result.push(ArchiveFile { - rel_path, - rel_str, - contents: FileContents::Generated(GeneratedFile::Manifest), - }); - } - "Cargo.lock" => continue, - VCS_INFO_FILE => anyhow::bail!( - "invalid inclusion of reserved file name \ - {} in package source", - VCS_INFO_FILE - ), - _ => { - result.push(ArchiveFile { - rel_path, - rel_str, - contents: FileContents::OnDisk(src_file), - }); - } - } - } - if pkg.include_lockfile() { - result.push(ArchiveFile { - rel_path: PathBuf::from("Cargo.lock"), - rel_str: "Cargo.lock".to_string(), - contents: FileContents::Generated(GeneratedFile::Lockfile), - }); - } - if let Some(vcs_info) = vcs_info { - result.push(ArchiveFile { - rel_path: PathBuf::from(VCS_INFO_FILE), - rel_str: VCS_INFO_FILE.to_string(), - contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)), - }); - } - if let Some(license_file) = &pkg.manifest().metadata().license_file { - let license_path = Path::new(license_file); - let abs_license_path = paths::normalize_path(&pkg.root().join(license_path)); - if abs_license_path.exists() { - match abs_license_path.strip_prefix(&pkg.root()) { - Ok(rel_license_path) => { - if !result.iter().any(|ar| ar.rel_path == rel_license_path) { - result.push(ArchiveFile { - rel_path: rel_license_path.to_path_buf(), - rel_str: rel_license_path - .to_str() - .expect("everything was utf8") - .to_string(), - contents: FileContents::OnDisk(abs_license_path), - }); - } - } - Err(_) => { - // The license exists somewhere outside of the package. - let license_name = license_path.file_name().unwrap(); - if result - .iter() - .any(|ar| ar.rel_path.file_name().unwrap() == license_name) - { - ws.config().shell().warn(&format!( - "license-file `{}` appears to be a path outside of the package, \ - but there is already a file named `{}` in the root of the package. \ - The archived crate will contain the copy in the root of the package. 
\ - Update the license-file to point to the path relative \ - to the root of the package to remove this warning.", - license_file, - license_name.to_str().unwrap() - ))?; - } else { - result.push(ArchiveFile { - rel_path: PathBuf::from(license_name), - rel_str: license_name.to_str().unwrap().to_string(), - contents: FileContents::OnDisk(abs_license_path), - }); - } - } - } - } else { - let rel_msg = if license_path.is_absolute() { - "".to_string() - } else { - format!(" (relative to `{}`)", pkg.root().display()) - }; - ws.config().shell().warn(&format!( - "license-file `{}` does not appear to exist{}.\n\ - Please update the license-file setting in the manifest at `{}`\n\ - This may become a hard error in the future.", - license_path.display(), - rel_msg, - pkg.manifest_path().display() - ))?; - } - } - result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path)); - - Ok(result) -} - -/// Construct `Cargo.lock` for the package to be published. -fn build_lock(ws: &Workspace<'_>, orig_pkg: &Package) -> CargoResult { - let config = ws.config(); - let orig_resolve = ops::load_pkg_lockfile(ws)?; - - // Convert Package -> TomlManifest -> Manifest -> Package - let toml_manifest = Rc::new( - orig_pkg - .manifest() - .original() - .prepare_for_publish(ws, orig_pkg.root())?, - ); - let package_root = orig_pkg.root(); - let source_id = orig_pkg.package_id().source_id(); - let (manifest, _nested_paths) = - TomlManifest::to_real_manifest(&toml_manifest, source_id, package_root, config)?; - let new_pkg = Package::new(manifest, orig_pkg.manifest_path()); - - // Regenerate Cargo.lock using the old one as a guide. - let tmp_ws = Workspace::ephemeral(new_pkg, ws.config(), None, true)?; - let (pkg_set, mut new_resolve) = ops::resolve_ws(&tmp_ws)?; - - if let Some(orig_resolve) = orig_resolve { - compare_resolve(config, tmp_ws.current()?, &orig_resolve, &new_resolve)?; - } - check_yanked(config, &pkg_set, &new_resolve)?; - - ops::resolve_to_string(&tmp_ws, &mut new_resolve) -} - -// Checks that the package has some piece of metadata that a human can -// use to tell what the package is about. -fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> { - let md = pkg.manifest().metadata(); - - let mut missing = vec![]; - - macro_rules! lacking { - ($( $($field: ident)||* ),*) => {{ - $( - if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* { - $(missing.push(stringify!($field).replace("_", "-"));)* - } - )* - }} - } - lacking!( - description, - license || license_file, - documentation || homepage || repository - ); - - if !missing.is_empty() { - let mut things = missing[..missing.len() - 1].join(", "); - // `things` will be empty if and only if its length is 1 (i.e., the only case - // to have no `or`). - if !things.is_empty() { - things.push_str(" or "); - } - things.push_str(missing.last().unwrap()); - - config.shell().warn(&format!( - "manifest has no {things}.\n\ - See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.", - things = things - ))? - } - - Ok(()) -} - -/// Checks if the package source is in a *git* DVCS repository. If *git*, and -/// the source is *dirty* (e.g., has uncommitted changes) then `bail!` with an -/// informative message. Otherwise return the sha1 hash of the current *HEAD* -/// commit, or `None` if no repo is found. 
-fn check_repo_state( - p: &Package, - src_files: &[PathBuf], - config: &Config, -) -> CargoResult> { - if let Ok(repo) = git2::Repository::discover(p.root()) { - if let Some(workdir) = repo.workdir() { - debug!("found a git repo at {:?}", workdir); - let path = p.manifest_path(); - let path = path.strip_prefix(workdir).unwrap_or(path); - if let Ok(status) = repo.status_file(path) { - if (status & git2::Status::IGNORED).is_empty() { - debug!( - "found (git) Cargo.toml at {:?} in workdir {:?}", - path, workdir - ); - let path_in_vcs = path - .parent() - .and_then(|p| p.to_str()) - .unwrap_or("") - .replace("\\", "/"); - return Ok(Some(VcsInfo { - git: git(p, src_files, &repo)?, - path_in_vcs, - })); - } - } - config.shell().verbose(|shell| { - shell.warn(format!( - "No (git) Cargo.toml found at `{}` in workdir `{}`", - path.display(), - workdir.display() - )) - })?; - } - } else { - config.shell().verbose(|shell| { - shell.warn(format!("No (git) VCS found for `{}`", p.root().display())) - })?; - } - - // No VCS with a checked in `Cargo.toml` found, so we don't know if the - // directory is dirty or not, thus we have to assume that it's clean. - return Ok(None); - - fn git(p: &Package, src_files: &[PathBuf], repo: &git2::Repository) -> CargoResult { - // This is a collection of any dirty or untracked files. This covers: - // - new/modified/deleted/renamed/type change (index or worktree) - // - untracked files (which are "new" worktree files) - // - ignored (in case the user has an `include` directive that - // conflicts with .gitignore). - let mut dirty_files = Vec::new(); - collect_statuses(repo, &mut dirty_files)?; - // Include each submodule so that the error message can provide - // specifically *which* files in a submodule are modified. - status_submodules(repo, &mut dirty_files)?; - - // Find the intersection of dirty in git, and the src_files that would - // be packaged. This is a lazy n^2 check, but seems fine with - // thousands of files. - let dirty_src_files: Vec = src_files - .iter() - .filter(|src_file| dirty_files.iter().any(|path| src_file.starts_with(path))) - .map(|path| { - path.strip_prefix(p.root()) - .unwrap_or(path) - .display() - .to_string() - }) - .collect(); - if dirty_src_files.is_empty() { - let rev_obj = repo.revparse_single("HEAD")?; - Ok(GitVcsInfo { - sha1: rev_obj.id().to_string(), - }) - } else { - anyhow::bail!( - "{} files in the working directory contain changes that were \ - not yet committed into git:\n\n{}\n\n\ - to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag", - dirty_src_files.len(), - dirty_src_files.join("\n") - ) - } - } - - // Helper to collect dirty statuses for a single repo. - fn collect_statuses( - repo: &git2::Repository, - dirty_files: &mut Vec, - ) -> CargoResult<()> { - let mut status_opts = git2::StatusOptions::new(); - // Exclude submodules, as they are being handled manually by recursing - // into each one so that details about specific files can be - // retrieved. 
- status_opts - .exclude_submodules(true) - .include_ignored(true) - .include_untracked(true); - let repo_statuses = repo.statuses(Some(&mut status_opts)).with_context(|| { - format!( - "failed to retrieve git status from repo {}", - repo.path().display() - ) - })?; - let workdir = repo.workdir().unwrap(); - let this_dirty = repo_statuses.iter().filter_map(|entry| { - let path = entry.path().expect("valid utf-8 path"); - if path.ends_with("Cargo.lock") && entry.status() == git2::Status::IGNORED { - // It is OK to include Cargo.lock even if it is ignored. - return None; - } - // Use an absolute path, so that comparing paths is easier - // (particularly with submodules). - Some(workdir.join(path)) - }); - dirty_files.extend(this_dirty); - Ok(()) - } - - // Helper to collect dirty statuses while recursing into submodules. - fn status_submodules( - repo: &git2::Repository, - dirty_files: &mut Vec, - ) -> CargoResult<()> { - for submodule in repo.submodules()? { - // Ignore submodules that don't open, they are probably not initialized. - // If its files are required, then the verification step should fail. - if let Ok(sub_repo) = submodule.open() { - status_submodules(&sub_repo, dirty_files)?; - collect_statuses(&sub_repo, dirty_files)?; - } - } - Ok(()) - } -} - -fn tar( - ws: &Workspace<'_>, - pkg: &Package, - ar_files: Vec, - dst: &File, - filename: &str, -) -> CargoResult<()> { - // Prepare the encoder and its header. - let filename = Path::new(filename); - let encoder = GzBuilder::new() - .filename(paths::path2bytes(filename)?) - .write(dst, Compression::best()); - - // Put all package files into a compressed archive. - let mut ar = Builder::new(encoder); - let config = ws.config(); - - let base_name = format!("{}-{}", pkg.name(), pkg.version()); - let base_path = Path::new(&base_name); - for ar_file in ar_files { - let ArchiveFile { - rel_path, - rel_str, - contents, - } = ar_file; - let ar_path = base_path.join(&rel_path); - config - .shell() - .verbose(|shell| shell.status("Archiving", &rel_str))?; - let mut header = Header::new_gnu(); - match contents { - FileContents::OnDisk(disk_path) => { - let mut file = File::open(&disk_path).with_context(|| { - format!("failed to open for archiving: `{}`", disk_path.display()) - })?; - let metadata = file.metadata().with_context(|| { - format!("could not learn metadata for: `{}`", disk_path.display()) - })?; - header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic); - header.set_cksum(); - ar.append_data(&mut header, &ar_path, &mut file) - .with_context(|| { - format!("could not archive source file `{}`", disk_path.display()) - })?; - } - FileContents::Generated(generated_kind) => { - let contents = match generated_kind { - GeneratedFile::Manifest => pkg.to_registry_toml(ws)?, - GeneratedFile::Lockfile => build_lock(ws, pkg)?, - GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?, - }; - header.set_entry_type(EntryType::file()); - header.set_mode(0o644); - header.set_size(contents.len() as u64); - // use something nonzero to avoid rust-lang/cargo#9512 - header.set_mtime(1); - header.set_cksum(); - ar.append_data(&mut header, &ar_path, contents.as_bytes()) - .with_context(|| format!("could not archive source file `{}`", rel_str))?; - } - } - } - - let encoder = ar.into_inner()?; - encoder.finish()?; - Ok(()) -} - -/// Generate warnings when packaging Cargo.lock, and the resolve have changed. 
-fn compare_resolve( - config: &Config, - current_pkg: &Package, - orig_resolve: &Resolve, - new_resolve: &Resolve, -) -> CargoResult<()> { - if config.shell().verbosity() != Verbosity::Verbose { - return Ok(()); - } - let new_set: BTreeSet = new_resolve.iter().collect(); - let orig_set: BTreeSet = orig_resolve.iter().collect(); - let added = new_set.difference(&orig_set); - // Removed entries are ignored, this is used to quickly find hints for why - // an entry changed. - let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect(); - for pkg_id in added { - if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() { - // Skip the package that is being created, since its SourceId - // (directory) changes. - continue; - } - // Check for candidates where the source has changed (such as [patch] - // or a dependency with multiple sources like path/version). - let removed_candidates: Vec<&PackageId> = removed - .iter() - .filter(|orig_pkg_id| { - orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version() - }) - .cloned() - .collect(); - let extra = match removed_candidates.len() { - 0 => { - // This can happen if the original was out of date. - let previous_versions: Vec<&PackageId> = removed - .iter() - .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name()) - .cloned() - .collect(); - match previous_versions.len() { - 0 => String::new(), - 1 => format!( - ", previous version was `{}`", - previous_versions[0].version() - ), - _ => format!( - ", previous versions were: {}", - previous_versions - .iter() - .map(|pkg_id| format!("`{}`", pkg_id.version())) - .collect::>() - .join(", ") - ), - } - } - 1 => { - // This can happen for multi-sourced dependencies like - // `{path="...", version="..."}` or `[patch]` replacement. - // `[replace]` is not captured in Cargo.lock. - format!( - ", was originally sourced from `{}`", - removed_candidates[0].source_id() - ) - } - _ => { - // I don't know if there is a way to actually trigger this, - // but handle it just in case. - let comma_list = removed_candidates - .iter() - .map(|pkg_id| format!("`{}`", pkg_id.source_id())) - .collect::>() - .join(", "); - format!( - ", was originally sourced from one of these sources: {}", - comma_list - ) - } - }; - let msg = format!( - "package `{}` added to the packaged Cargo.lock file{}", - pkg_id, extra - ); - config.shell().note(msg)?; - } - Ok(()) -} - -fn check_yanked(config: &Config, pkg_set: &PackageSet<'_>, resolve: &Resolve) -> CargoResult<()> { - // Checking the yanked status involves taking a look at the registry and - // maybe updating files, so be sure to lock it here. - let _lock = config.acquire_package_cache_lock()?; - - let mut sources = pkg_set.sources_mut(); - for pkg_id in resolve.iter() { - if let Some(source) = sources.get_mut(pkg_id.source_id()) { - if source.is_yanked(pkg_id)? 
{ - config.shell().warn(format!( - "package `{}` in Cargo.lock is yanked in registry `{}`, \ - consider updating to a version that is not yanked", - pkg_id, - pkg_id.source_id().display_registry_name() - ))?; - } - } - } - Ok(()) -} - -fn run_verify( - ws: &Workspace<'_>, - pkg: &Package, - tar: &FileLock, - opts: &PackageOpts<'_>, -) -> CargoResult<()> { - let config = ws.config(); - - config.shell().status("Verifying", pkg)?; - - let f = GzDecoder::new(tar.file()); - let dst = tar - .parent() - .join(&format!("{}-{}", pkg.name(), pkg.version())); - if dst.exists() { - paths::remove_dir_all(&dst)?; - } - let mut archive = Archive::new(f); - // We don't need to set the Modified Time, as it's not relevant to verification - // and it errors on filesystems that don't support setting a modified timestamp - archive.set_preserve_mtime(false); - archive.unpack(dst.parent().unwrap())?; - - // Manufacture an ephemeral workspace to ensure that even if the top-level - // package has a workspace we can still build our new crate. - let id = SourceId::for_path(&dst)?; - let mut src = PathSource::new(&dst, id, ws.config()); - let new_pkg = src.root_package()?; - let pkg_fingerprint = hash_all(&dst)?; - let ws = Workspace::ephemeral(new_pkg, config, None, true)?; - - let rustc_args = if pkg - .manifest() - .unstable_features() - .require(Feature::public_dependency()) - .is_ok() - { - // FIXME: Turn this on at some point in the future - //Some(vec!["-D exported_private_dependencies".to_string()]) - Some(vec![]) - } else { - None - }; - - let exec: Arc = Arc::new(DefaultExecutor); - ops::compile_with_exec( - &ws, - &ops::CompileOptions { - build_config: BuildConfig::new(config, opts.jobs, &opts.targets, CompileMode::Build)?, - cli_features: opts.cli_features.clone(), - spec: ops::Packages::Packages(Vec::new()), - filter: ops::CompileFilter::Default { - required_features_filterable: true, - }, - target_rustdoc_args: None, - target_rustc_args: rustc_args, - target_rustc_crate_types: None, - local_rustdoc_args: None, - rustdoc_document_private_items: false, - honor_rust_version: true, - }, - &exec, - )?; - - // Check that `build.rs` didn't modify any files in the `src` directory. - let ws_fingerprint = hash_all(&dst)?; - if pkg_fingerprint != ws_fingerprint { - let changes = report_hash_difference(&pkg_fingerprint, &ws_fingerprint); - anyhow::bail!( - "Source directory was modified by build.rs during cargo publish. 
\ - Build scripts should not modify anything outside of OUT_DIR.\n\ - {}\n\n\ - To proceed despite this, pass the `--no-verify` flag.", - changes - ) - } - - Ok(()) -} - -fn hash_all(path: &Path) -> CargoResult> { - fn wrap(path: &Path) -> CargoResult> { - let mut result = HashMap::new(); - let walker = walkdir::WalkDir::new(path).into_iter(); - for entry in walker.filter_entry(|e| !(e.depth() == 1 && e.file_name() == "target")) { - let entry = entry?; - let file_type = entry.file_type(); - if file_type.is_file() { - let file = File::open(entry.path())?; - let hash = util::hex::hash_u64_file(&file)?; - result.insert(entry.path().to_path_buf(), hash); - } else if file_type.is_symlink() { - let hash = util::hex::hash_u64(&fs::read_link(entry.path())?); - result.insert(entry.path().to_path_buf(), hash); - } else if file_type.is_dir() { - let hash = util::hex::hash_u64(&()); - result.insert(entry.path().to_path_buf(), hash); - } - } - Ok(result) - } - let result = wrap(path).with_context(|| format!("failed to verify output at {:?}", path))?; - Ok(result) -} - -fn report_hash_difference(orig: &HashMap, after: &HashMap) -> String { - let mut changed = Vec::new(); - let mut removed = Vec::new(); - for (key, value) in orig { - match after.get(key) { - Some(after_value) => { - if value != after_value { - changed.push(key.to_string_lossy()); - } - } - None => removed.push(key.to_string_lossy()), - } - } - let mut added: Vec<_> = after - .keys() - .filter(|key| !orig.contains_key(*key)) - .map(|key| key.to_string_lossy()) - .collect(); - let mut result = Vec::new(); - if !changed.is_empty() { - changed.sort_unstable(); - result.push(format!("Changed: {}", changed.join("\n\t"))); - } - if !added.is_empty() { - added.sort_unstable(); - result.push(format!("Added: {}", added.join("\n\t"))); - } - if !removed.is_empty() { - removed.sort_unstable(); - result.push(format!("Removed: {}", removed.join("\n\t"))); - } - assert!(!result.is_empty(), "unexpected empty change detection"); - result.join("\n") -} - -// It can often be the case that files of a particular name on one platform -// can't actually be created on another platform. For example files with colons -// in the name are allowed on Unix but not on Windows. -// -// To help out in situations like this, issue about weird filenames when -// packaging as a "heads up" that something may not work on other platforms. 
-fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> { - let name = match file.file_name() { - Some(name) => name, - None => return Ok(()), - }; - let name = match name.to_str() { - Some(name) => name, - None => anyhow::bail!( - "path does not have a unicode filename which may not unpack \ - on all platforms: {}", - file.display() - ), - }; - let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*']; - if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) { - anyhow::bail!( - "cannot package a filename with a special character `{}`: {}", - c, - file.display() - ) - } - if restricted_names::is_windows_reserved_path(file) { - shell.warn(format!( - "file {} is a reserved Windows filename, \ - it will not work on Windows platforms", - file.display() - ))?; - } - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_pkgid.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_pkgid.rs deleted file mode 100644 index eeed6ac02..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_pkgid.rs +++ /dev/null @@ -1,16 +0,0 @@ -use crate::core::{PackageIdSpec, Workspace}; -use crate::ops; -use crate::util::CargoResult; - -pub fn pkgid(ws: &Workspace<'_>, spec: Option<&str>) -> CargoResult { - let resolve = match ops::load_pkg_lockfile(ws)? { - Some(resolve) => resolve, - None => anyhow::bail!("a Cargo.lock must exist for this command"), - }; - - let pkgid = match spec { - Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?, - None => ws.current()?.package_id(), - }; - Ok(PackageIdSpec::from_package_id(pkgid)) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_read_manifest.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_read_manifest.rs deleted file mode 100644 index d55208b88..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_read_manifest.rs +++ /dev/null @@ -1,229 +0,0 @@ -use std::collections::{HashMap, HashSet}; -use std::fs; -use std::io; -use std::path::{Path, PathBuf}; - -use crate::core::{EitherManifest, Package, PackageId, SourceId}; -use crate::util::errors::CargoResult; -use crate::util::important_paths::find_project_manifest_exact; -use crate::util::toml::read_manifest; -use crate::util::Config; -use cargo_util::paths; -use log::{info, trace}; - -pub fn read_package( - path: &Path, - source_id: SourceId, - config: &Config, -) -> CargoResult<(Package, Vec)> { - trace!( - "read_package; path={}; source-id={}", - path.display(), - source_id - ); - let (manifest, nested) = read_manifest(path, source_id, config)?; - let manifest = match manifest { - EitherManifest::Real(manifest) => manifest, - EitherManifest::Virtual(..) 
=> anyhow::bail!( - "found a virtual manifest at `{}` instead of a package \ - manifest", - path.display() - ), - }; - - Ok((Package::new(manifest, path), nested)) -} - -pub fn read_packages( - path: &Path, - source_id: SourceId, - config: &Config, -) -> CargoResult> { - let mut all_packages = HashMap::new(); - let mut visited = HashSet::::new(); - let mut errors = Vec::::new(); - - trace!( - "looking for root package: {}, source_id={}", - path.display(), - source_id - ); - - walk(path, &mut |dir| { - trace!("looking for child package: {}", dir.display()); - - // Don't recurse into hidden/dot directories unless we're at the toplevel - if dir != path { - let name = dir.file_name().and_then(|s| s.to_str()); - if name.map(|s| s.starts_with('.')) == Some(true) { - return Ok(false); - } - - // Don't automatically discover packages across git submodules - if dir.join(".git").exists() { - return Ok(false); - } - } - - // Don't ever look at target directories - if dir.file_name().and_then(|s| s.to_str()) == Some("target") - && has_manifest(dir.parent().unwrap()) - { - return Ok(false); - } - - if has_manifest(dir) { - read_nested_packages( - dir, - &mut all_packages, - source_id, - config, - &mut visited, - &mut errors, - )?; - } - Ok(true) - })?; - - if all_packages.is_empty() { - match errors.pop() { - Some(err) => Err(err), - None => { - if find_project_manifest_exact(path, "cargo.toml").is_ok() { - Err(anyhow::format_err!( - "Could not find Cargo.toml in `{}`, but found cargo.toml please try to rename it to Cargo.toml", - path.display() - )) - } else { - Err(anyhow::format_err!( - "Could not find Cargo.toml in `{}`", - path.display() - )) - } - } - } - } else { - Ok(all_packages.into_iter().map(|(_, v)| v).collect()) - } -} - -fn walk(path: &Path, callback: &mut dyn FnMut(&Path) -> CargoResult) -> CargoResult<()> { - if !callback(path)? { - trace!("not processing {}", path.display()); - return Ok(()); - } - - // Ignore any permission denied errors because temporary directories - // can often have some weird permissions on them. - let dirs = match fs::read_dir(path) { - Ok(dirs) => dirs, - Err(ref e) if e.kind() == io::ErrorKind::PermissionDenied => return Ok(()), - Err(e) => { - let cx = format!("failed to read directory `{}`", path.display()); - let e = anyhow::Error::from(e); - return Err(e.context(cx)); - } - }; - for dir in dirs { - let dir = dir?; - if dir.file_type()?.is_dir() { - walk(&dir.path(), callback)?; - } - } - Ok(()) -} - -fn has_manifest(path: &Path) -> bool { - find_project_manifest_exact(path, "Cargo.toml").is_ok() -} - -fn read_nested_packages( - path: &Path, - all_packages: &mut HashMap, - source_id: SourceId, - config: &Config, - visited: &mut HashSet, - errors: &mut Vec, -) -> CargoResult<()> { - if !visited.insert(path.to_path_buf()) { - return Ok(()); - } - - let manifest_path = find_project_manifest_exact(path, "Cargo.toml")?; - - let (manifest, nested) = match read_manifest(&manifest_path, source_id, config) { - Err(err) => { - // Ignore malformed manifests found on git repositories - // - // git source try to find and read all manifests from the repository - // but since it's not possible to exclude folders from this search - // it's safer to ignore malformed manifests to avoid - // - // TODO: Add a way to exclude folders? 
- info!( - "skipping malformed package found at `{}`", - path.to_string_lossy() - ); - errors.push(err.into()); - return Ok(()); - } - Ok(tuple) => tuple, - }; - - let manifest = match manifest { - EitherManifest::Real(manifest) => manifest, - EitherManifest::Virtual(..) => return Ok(()), - }; - let pkg = Package::new(manifest, &manifest_path); - - let pkg_id = pkg.package_id(); - use std::collections::hash_map::Entry; - match all_packages.entry(pkg_id) { - Entry::Vacant(v) => { - v.insert(pkg); - } - Entry::Occupied(_) => { - info!( - "skipping nested package `{}` found at `{}`", - pkg.name(), - path.to_string_lossy() - ); - } - } - - // Registry sources are not allowed to have `path=` dependencies because - // they're all translated to actual registry dependencies. - // - // We normalize the path here ensure that we don't infinitely walk around - // looking for crates. By normalizing we ensure that we visit this crate at - // most once. - // - // TODO: filesystem/symlink implications? - if !source_id.is_registry() { - for p in nested.iter() { - let path = paths::normalize_path(&path.join(p)); - let result = - read_nested_packages(&path, all_packages, source_id, config, visited, errors); - // Ignore broken manifests found on git repositories. - // - // A well formed manifest might still fail to load due to reasons - // like referring to a "path" that requires an extra build step. - // - // See https://github.com/rust-lang/cargo/issues/6822. - if let Err(err) = result { - if source_id.is_git() { - info!( - "skipping nested package found at `{}`: {:?}", - path.display(), - &err, - ); - errors.push(err); - } else { - return Err(err); - } - } - } - } - - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_run.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_run.rs deleted file mode 100644 index 69bae2c59..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_run.rs +++ /dev/null @@ -1,101 +0,0 @@ -use std::ffi::OsString; -use std::iter; -use std::path::Path; - -use crate::core::compiler::UnitOutput; -use crate::core::{TargetKind, Workspace}; -use crate::ops; -use crate::util::CargoResult; - -pub fn run( - ws: &Workspace<'_>, - options: &ops::CompileOptions, - args: &[OsString], -) -> CargoResult<()> { - let config = ws.config(); - - if options.filter.contains_glob_patterns() { - anyhow::bail!("`cargo run` does not support glob patterns on target selection") - } - - // We compute the `bins` here *just for diagnosis*. The actual set of - // packages to be run is determined by the `ops::compile` call below. - let packages = options.spec.get_packages(ws)?; - let bins: Vec<_> = packages - .into_iter() - .flat_map(|pkg| { - iter::repeat(pkg).zip(pkg.manifest().targets().iter().filter(|target| { - !target.is_lib() - && !target.is_custom_build() - && if !options.filter.is_specific() { - target.is_bin() - } else { - options.filter.target_run(target) - } - })) - }) - .collect(); - - if bins.is_empty() { - if !options.filter.is_specific() { - anyhow::bail!("a bin target must be available for `cargo run`") - } else { - // This will be verified in `cargo_compile`. - } - } - - if bins.len() == 1 { - let target = bins[0].1; - if let TargetKind::ExampleLib(..) 
= target.kind() { - anyhow::bail!( - "example target `{}` is a library and cannot be executed", - target.name() - ) - } - } - - if bins.len() > 1 { - if !options.filter.is_specific() { - let mut names: Vec<&str> = bins - .into_iter() - .map(|(_pkg, target)| target.name()) - .collect(); - names.sort(); - anyhow::bail!( - "`cargo run` could not determine which binary to run. \ - Use the `--bin` option to specify a binary, \ - or the `default-run` manifest key.\n\ - available binaries: {}", - names.join(", ") - ) - } else { - anyhow::bail!( - "`cargo run` can run at most one executable, but \ - multiple were specified" - ) - } - } - - // `cargo run` is only compatible with one `--target` flag at most - options.build_config.single_requested_kind()?; - - let compile = ops::compile(ws, options)?; - assert_eq!(compile.binaries.len(), 1); - let UnitOutput { - unit, - path, - script_meta, - } = &compile.binaries[0]; - let exe = match path.strip_prefix(config.cwd()) { - Ok(path) if path.file_name() == Some(path.as_os_str()) => Path::new(".").join(path), - Ok(path) => path.to_path_buf(), - Err(_) => path.to_path_buf(), - }; - let pkg = bins[0].0; - let mut process = compile.target_process(exe, unit.kind, pkg, *script_meta)?; - process.args(args).cwd(config.cwd()); - - config.shell().status("Running", process.to_string())?; - - process.exec_replace() -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_test.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_test.rs deleted file mode 100644 index c461c93a6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_test.rs +++ /dev/null @@ -1,266 +0,0 @@ -use crate::core::compiler::{Compilation, CompileKind, Doctest, UnitOutput}; -use crate::core::shell::Verbosity; -use crate::core::{TargetKind, Workspace}; -use crate::ops; -use crate::util::errors::CargoResult; -use crate::util::{add_path_args, CargoTestError, Config, Test}; -use cargo_util::ProcessError; -use std::ffi::OsString; - -pub struct TestOptions { - pub compile_opts: ops::CompileOptions, - pub no_run: bool, - pub no_fail_fast: bool, -} - -pub fn run_tests( - ws: &Workspace<'_>, - options: &TestOptions, - test_args: &[&str], -) -> CargoResult> { - let compilation = compile_tests(ws, options)?; - - if options.no_run { - return Ok(None); - } - let (test, mut errors) = run_unit_tests(ws.config(), options, test_args, &compilation)?; - - // If we have an error and want to fail fast, then return. 
- if !errors.is_empty() && !options.no_fail_fast { - return Ok(Some(CargoTestError::new(test, errors))); - } - - let (doctest, docerrors) = run_doc_tests(ws, options, test_args, &compilation)?; - let test = if docerrors.is_empty() { test } else { doctest }; - errors.extend(docerrors); - if errors.is_empty() { - Ok(None) - } else { - Ok(Some(CargoTestError::new(test, errors))) - } -} - -pub fn run_benches( - ws: &Workspace<'_>, - options: &TestOptions, - args: &[&str], -) -> CargoResult> { - let compilation = compile_tests(ws, options)?; - - if options.no_run { - return Ok(None); - } - - let mut args = args.to_vec(); - args.push("--bench"); - - let (test, errors) = run_unit_tests(ws.config(), options, &args, &compilation)?; - - match errors.len() { - 0 => Ok(None), - _ => Ok(Some(CargoTestError::new(test, errors))), - } -} - -fn compile_tests<'a>(ws: &Workspace<'a>, options: &TestOptions) -> CargoResult> { - let mut compilation = ops::compile(ws, &options.compile_opts)?; - compilation.tests.sort(); - Ok(compilation) -} - -/// Runs the unit and integration tests of a package. -fn run_unit_tests( - config: &Config, - options: &TestOptions, - test_args: &[&str], - compilation: &Compilation<'_>, -) -> CargoResult<(Test, Vec)> { - let cwd = config.cwd(); - let mut errors = Vec::new(); - - for UnitOutput { - unit, - path, - script_meta, - } in compilation.tests.iter() - { - let test_path = unit.target.src_path().path().unwrap(); - let exe_display = if let TargetKind::Test = unit.target.kind() { - format!( - "{} ({})", - test_path - .strip_prefix(unit.pkg.root()) - .unwrap_or(test_path) - .display(), - path.strip_prefix(cwd).unwrap_or(path).display() - ) - } else { - format!( - "unittests ({})", - path.strip_prefix(cwd).unwrap_or(path).display() - ) - }; - - let mut cmd = compilation.target_process(path, unit.kind, &unit.pkg, *script_meta)?; - cmd.args(test_args); - if unit.target.harness() && config.shell().verbosity() == Verbosity::Quiet { - cmd.arg("--quiet"); - } - config - .shell() - .concise(|shell| shell.status("Running", &exe_display))?; - config - .shell() - .verbose(|shell| shell.status("Running", &cmd))?; - - let result = cmd.exec(); - - if let Err(e) = result { - let e = e.downcast::()?; - errors.push(( - unit.target.kind().clone(), - unit.target.name().to_string(), - unit.pkg.name().to_string(), - e, - )); - if !options.no_fail_fast { - break; - } - } - } - - if errors.len() == 1 { - let (kind, name, pkg_name, e) = errors.pop().unwrap(); - Ok(( - Test::UnitTest { - kind, - name, - pkg_name, - }, - vec![e], - )) - } else { - Ok(( - Test::Multiple, - errors.into_iter().map(|(_, _, _, e)| e).collect(), - )) - } -} - -fn run_doc_tests( - ws: &Workspace<'_>, - options: &TestOptions, - test_args: &[&str], - compilation: &Compilation<'_>, -) -> CargoResult<(Test, Vec)> { - let config = ws.config(); - let mut errors = Vec::new(); - let doctest_xcompile = config.cli_unstable().doctest_xcompile; - let doctest_in_workspace = config.cli_unstable().doctest_in_workspace; - - for doctest_info in &compilation.to_doc_test { - let Doctest { - args, - unstable_opts, - unit, - linker, - script_meta, - } = doctest_info; - - if !doctest_xcompile { - match unit.kind { - CompileKind::Host => {} - CompileKind::Target(target) => { - if target.short_name() != compilation.host { - // Skip doctests, -Zdoctest-xcompile not enabled. 
- config.shell().verbose(|shell| { - shell.note(format!( - "skipping doctests for {} ({}), \ - cross-compilation doctests are not yet supported\n\ - See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile \ - for more information.", - unit.pkg, - unit.target.description_named() - )) - })?; - continue; - } - } - } - } - - config.shell().status("Doc-tests", unit.target.name())?; - let mut p = compilation.rustdoc_process(unit, *script_meta)?; - p.arg("--crate-name").arg(&unit.target.crate_name()); - p.arg("--test"); - - if doctest_in_workspace { - add_path_args(ws, unit, &mut p); - // FIXME(swatinem): remove the `unstable-options` once rustdoc stabilizes the `test-run-directory` option - p.arg("-Z").arg("unstable-options"); - p.arg("--test-run-directory") - .arg(unit.pkg.root().to_path_buf()); - } else { - p.arg(unit.target.src_path().path().unwrap()); - } - - if doctest_xcompile { - if let CompileKind::Target(target) = unit.kind { - // use `rustc_target()` to properly handle JSON target paths - p.arg("--target").arg(target.rustc_target()); - } - p.arg("-Zunstable-options"); - p.arg("--enable-per-target-ignores"); - if let Some((runtool, runtool_args)) = compilation.target_runner(unit.kind) { - p.arg("--runtool").arg(runtool); - for arg in runtool_args { - p.arg("--runtool-arg").arg(arg); - } - } - if let Some(linker) = linker { - let mut joined = OsString::from("linker="); - joined.push(linker); - p.arg("-C").arg(joined); - } - } - - for &rust_dep in &[ - &compilation.deps_output[&unit.kind], - &compilation.deps_output[&CompileKind::Host], - ] { - let mut arg = OsString::from("dependency="); - arg.push(rust_dep); - p.arg("-L").arg(arg); - } - - for native_dep in compilation.native_dirs.iter() { - p.arg("-L").arg(native_dep); - } - - for arg in test_args { - p.arg("--test-args").arg(arg); - } - - if config.shell().verbosity() == Verbosity::Quiet { - p.arg("--test-args").arg("--quiet"); - } - - p.args(args); - - if *unstable_opts { - p.arg("-Zunstable-options"); - } - - config - .shell() - .verbose(|shell| shell.status("Running", p.to_string()))?; - if let Err(e) = p.exec() { - let e = e.downcast::()?; - errors.push(e); - if !options.no_fail_fast { - return Ok((Test::Doc, errors)); - } - } - } - Ok((Test::Doc, errors)) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_uninstall.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_uninstall.rs deleted file mode 100644 index 355154418..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/cargo_uninstall.rs +++ /dev/null @@ -1,155 +0,0 @@ -use crate::core::PackageId; -use crate::core::{PackageIdSpec, SourceId}; -use crate::ops::common_for_install_and_uninstall::*; -use crate::sources::PathSource; -use crate::util::errors::CargoResult; -use crate::util::Config; -use crate::util::Filesystem; -use anyhow::bail; -use cargo_util::paths; -use std::collections::BTreeSet; -use std::env; - -pub fn uninstall( - root: Option<&str>, - specs: Vec<&str>, - bins: &[String], - config: &Config, -) -> CargoResult<()> { - if specs.len() > 1 && !bins.is_empty() { - bail!("A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant."); - } - - let root = resolve_root(root, config)?; - let scheduled_error = if specs.len() == 1 { - uninstall_one(&root, specs[0], bins, config)?; - false - } else if specs.is_empty() { - uninstall_cwd(&root, bins, config)?; - false - } else { - let mut succeeded = vec![]; - let mut failed = 
vec![]; - for spec in specs { - let root = root.clone(); - match uninstall_one(&root, spec, bins, config) { - Ok(()) => succeeded.push(spec), - Err(e) => { - crate::display_error(&e, &mut config.shell()); - failed.push(spec) - } - } - } - - let mut summary = vec![]; - if !succeeded.is_empty() { - summary.push(format!( - "Successfully uninstalled {}!", - succeeded.join(", ") - )); - } - if !failed.is_empty() { - summary.push(format!( - "Failed to uninstall {} (see error(s) above).", - failed.join(", ") - )); - } - - if !succeeded.is_empty() || !failed.is_empty() { - config.shell().status("Summary", summary.join(" "))?; - } - - !failed.is_empty() - }; - - if scheduled_error { - bail!("some packages failed to uninstall"); - } - - Ok(()) -} - -pub fn uninstall_one( - root: &Filesystem, - spec: &str, - bins: &[String], - config: &Config, -) -> CargoResult<()> { - let tracker = InstallTracker::load(config, root)?; - let all_pkgs = tracker.all_installed_bins().map(|(pkg_id, _set)| *pkg_id); - let pkgid = PackageIdSpec::query_str(spec, all_pkgs)?; - uninstall_pkgid(root, tracker, pkgid, bins, config) -} - -fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoResult<()> { - let tracker = InstallTracker::load(config, root)?; - let source_id = SourceId::for_path(config.cwd())?; - let mut src = path_source(source_id, config)?; - let pkg = select_pkg( - &mut src, - None, - |path: &mut PathSource<'_>| path.read_packages(), - config, - )?; - let pkgid = pkg.package_id(); - uninstall_pkgid(root, tracker, pkgid, bins, config) -} - -fn uninstall_pkgid( - root: &Filesystem, - mut tracker: InstallTracker, - pkgid: PackageId, - bins: &[String], - config: &Config, -) -> CargoResult<()> { - let mut to_remove = Vec::new(); - let installed = match tracker.installed_bins(pkgid) { - Some(bins) => bins.clone(), - None => bail!("package `{}` is not installed", pkgid), - }; - - let dst = root.join("bin").into_path_unlocked(); - for bin in &installed { - let bin = dst.join(bin); - if !bin.exists() { - bail!( - "corrupt metadata, `{}` does not exist when it should", - bin.display() - ) - } - } - - let bins = bins - .iter() - .map(|s| { - if s.ends_with(env::consts::EXE_SUFFIX) { - s.to_string() - } else { - format!("{}{}", s, env::consts::EXE_SUFFIX) - } - }) - .collect::>(); - - for bin in bins.iter() { - if !installed.contains(bin) { - bail!("binary `{}` not installed as part of `{}`", bin, pkgid) - } - } - - if bins.is_empty() { - to_remove.extend(installed.iter().map(|b| dst.join(b))); - tracker.remove(pkgid, &installed); - } else { - for bin in bins.iter() { - to_remove.push(dst.join(bin)); - } - tracker.remove(pkgid, &bins); - } - tracker.save()?; - for bin in to_remove { - config.shell().status("Removing", bin.display())?; - paths::remove_file(bin)?; - } - - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/common_for_install_and_uninstall.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/common_for_install_and_uninstall.rs deleted file mode 100644 index 444e57cfd..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/common_for_install_and_uninstall.rs +++ /dev/null @@ -1,696 +0,0 @@ -use std::collections::{btree_map, BTreeMap, BTreeSet}; -use std::env; -use std::io::prelude::*; -use std::io::SeekFrom; -use std::path::{Path, PathBuf}; -use std::rc::Rc; - -use anyhow::{bail, format_err, Context as _}; -use serde::{Deserialize, Serialize}; - -use crate::core::compiler::Freshness; -use crate::core::{Dependency, FeatureValue, Package, 
PackageId, Source, SourceId}; -use crate::ops::{self, CompileFilter, CompileOptions}; -use crate::sources::PathSource; -use crate::util::errors::CargoResult; -use crate::util::Config; -use crate::util::{FileLock, Filesystem}; - -/// On-disk tracking for which package installed which binary. -/// -/// v1 is an older style, v2 is a new style that tracks more information, and -/// is both backwards and forwards compatible. Cargo keeps both files in sync, -/// updating both v1 and v2 at the same time. Additionally, if it detects -/// changes in v1 that are not in v2 (such as when an older version of Cargo -/// is used), it will automatically propagate those changes to v2. -/// -/// This maintains a filesystem lock, preventing other instances of Cargo from -/// modifying at the same time. Drop the value to unlock. -/// -/// It is intended that v1 should be retained for a while during a longish -/// transition period, and then v1 can be removed. -pub struct InstallTracker { - v1: CrateListingV1, - v2: CrateListingV2, - v1_lock: FileLock, - v2_lock: FileLock, -} - -/// Tracking information for the set of installed packages. -#[derive(Default, Deserialize, Serialize)] -struct CrateListingV2 { - /// Map of every installed package. - installs: BTreeMap, - /// Forwards compatibility. Unknown keys from future versions of Cargo - /// will be stored here and retained when the file is saved. - #[serde(flatten)] - other: BTreeMap, -} - -/// Tracking information for the installation of a single package. -/// -/// This tracks the settings that were used when the package was installed. -/// Future attempts to install the same package will check these settings to -/// determine if it needs to be rebuilt/reinstalled. If nothing has changed, -/// then Cargo will inform the user that it is "up to date". -/// -/// This is only used for the v2 format. -#[derive(Debug, Deserialize, Serialize)] -struct InstallInfo { - /// Version requested via `--version`. - /// None if `--version` not specified. Currently not used, possibly may be - /// used in the future. - version_req: Option, - /// Set of binary names installed. - bins: BTreeSet, - /// Set of features explicitly enabled. - features: BTreeSet, - all_features: bool, - no_default_features: bool, - /// Either "debug" or "release". - profile: String, - /// The installation target. - /// Either the host or the value specified in `--target`. - /// None if unknown (when loading from v1). - target: Option, - /// Output of `rustc -V`. - /// None if unknown (when loading from v1). - /// Currently not used, possibly may be used in the future. - rustc: Option, - /// Forwards compatibility. - #[serde(flatten)] - other: BTreeMap, -} - -/// Tracking information for the set of installed packages. -#[derive(Default, Deserialize, Serialize)] -pub struct CrateListingV1 { - /// Map of installed package id to the set of binary names for that package. - v1: BTreeMap>, -} - -impl InstallTracker { - /// Create an InstallTracker from information on disk. - pub fn load(config: &Config, root: &Filesystem) -> CargoResult { - let v1_lock = root.open_rw(Path::new(".crates.toml"), config, "crate metadata")?; - let v2_lock = root.open_rw(Path::new(".crates2.json"), config, "crate metadata")?; - - let v1 = (|| -> CargoResult<_> { - let mut contents = String::new(); - v1_lock.file().read_to_string(&mut contents)?; - if contents.is_empty() { - Ok(CrateListingV1::default()) - } else { - Ok(toml::from_str(&contents).with_context(|| "invalid TOML found for metadata")?) 
- } - })() - .with_context(|| { - format!( - "failed to parse crate metadata at `{}`", - v1_lock.path().to_string_lossy() - ) - })?; - - let v2 = (|| -> CargoResult<_> { - let mut contents = String::new(); - v2_lock.file().read_to_string(&mut contents)?; - let mut v2 = if contents.is_empty() { - CrateListingV2::default() - } else { - serde_json::from_str(&contents) - .with_context(|| "invalid JSON found for metadata")? - }; - v2.sync_v1(&v1); - Ok(v2) - })() - .with_context(|| { - format!( - "failed to parse crate metadata at `{}`", - v2_lock.path().to_string_lossy() - ) - })?; - - Ok(InstallTracker { - v1, - v2, - v1_lock, - v2_lock, - }) - } - - /// Checks if the given package should be built, and checks if executables - /// already exist in the destination directory. - /// - /// Returns a tuple `(freshness, map)`. `freshness` indicates if the - /// package should be built (`Dirty`) or if it is already up-to-date - /// (`Fresh`) and should be skipped. The map maps binary names to the - /// PackageId that installed it (which is None if not known). - /// - /// If there are no duplicates, then it will be considered `Dirty` (i.e., - /// it is OK to build/install). - /// - /// `force=true` will always be considered `Dirty` (i.e., it will always - /// be rebuilt/reinstalled). - /// - /// Returns an error if there is a duplicate and `--force` is not used. - pub fn check_upgrade( - &self, - dst: &Path, - pkg: &Package, - force: bool, - opts: &CompileOptions, - target: &str, - _rustc: &str, - ) -> CargoResult<(Freshness, BTreeMap>)> { - let exes = exe_names(pkg, &opts.filter); - // Check if any tracked exe's are already installed. - let duplicates = self.find_duplicates(dst, &exes); - if force || duplicates.is_empty() { - return Ok((Freshness::Dirty, duplicates)); - } - // Check if all duplicates come from packages of the same name. If - // there are duplicates from other packages, then --force will be - // required. - // - // There may be multiple matching duplicates if different versions of - // the same package installed different binaries. - // - // This does not check the source_id in order to allow the user to - // switch between different sources. For example, installing from git, - // and then switching to the official crates.io release or vice-versa. - // If the source_id were included, then the user would get possibly - // confusing errors like "package `foo 1.0.0` is already installed" - // and the change of source may not be obvious why it fails. - let matching_duplicates: Vec = duplicates - .values() - .filter_map(|v| match v { - Some(dupe_pkg_id) if dupe_pkg_id.name() == pkg.name() => Some(*dupe_pkg_id), - _ => None, - }) - .collect(); - - // If both sets are the same length, that means all duplicates come - // from packages with the same name. - if matching_duplicates.len() == duplicates.len() { - // Determine if it is dirty or fresh. - let source_id = pkg.package_id().source_id(); - if source_id.is_path() { - // `cargo install --path ...` is always rebuilt. - return Ok((Freshness::Dirty, duplicates)); - } - let is_up_to_date = |dupe_pkg_id| { - let info = self - .v2 - .installs - .get(dupe_pkg_id) - .expect("dupes must be in sync"); - let precise_equal = if source_id.is_git() { - // Git sources must have the exact same hash to be - // considered "fresh". 
- dupe_pkg_id.source_id().precise() == source_id.precise() - } else { - true - }; - - dupe_pkg_id.version() == pkg.version() - && dupe_pkg_id.source_id() == source_id - && precise_equal - && info.is_up_to_date(opts, target, &exes) - }; - if matching_duplicates.iter().all(is_up_to_date) { - Ok((Freshness::Fresh, duplicates)) - } else { - Ok((Freshness::Dirty, duplicates)) - } - } else { - // Format the error message. - let mut msg = String::new(); - for (bin, p) in duplicates.iter() { - msg.push_str(&format!("binary `{}` already exists in destination", bin)); - if let Some(p) = p.as_ref() { - msg.push_str(&format!(" as part of `{}`\n", p)); - } else { - msg.push('\n'); - } - } - msg.push_str("Add --force to overwrite"); - bail!("{}", msg); - } - } - - /// Check if any executables are already installed. - /// - /// Returns a map of duplicates, the key is the executable name and the - /// value is the PackageId that is already installed. The PackageId is - /// None if it is an untracked executable. - fn find_duplicates( - &self, - dst: &Path, - exes: &BTreeSet, - ) -> BTreeMap> { - exes.iter() - .filter_map(|name| { - if !dst.join(&name).exists() { - None - } else { - let p = self.v2.package_for_bin(name); - Some((name.clone(), p)) - } - }) - .collect() - } - - /// Mark that a package was installed. - pub fn mark_installed( - &mut self, - package: &Package, - bins: &BTreeSet, - version_req: Option, - opts: &CompileOptions, - target: &str, - rustc: &str, - ) { - self.v2 - .mark_installed(package, bins, version_req, opts, target, rustc); - self.v1.mark_installed(package, bins); - } - - /// Save tracking information to disk. - pub fn save(&self) -> CargoResult<()> { - self.v1.save(&self.v1_lock).with_context(|| { - format!( - "failed to write crate metadata at `{}`", - self.v1_lock.path().to_string_lossy() - ) - })?; - - self.v2.save(&self.v2_lock).with_context(|| { - format!( - "failed to write crate metadata at `{}`", - self.v2_lock.path().to_string_lossy() - ) - })?; - Ok(()) - } - - /// Iterator of all installed binaries. - /// Items are `(pkg_id, bins)` where `bins` is the set of binaries that - /// package installed. - pub fn all_installed_bins(&self) -> impl Iterator)> { - self.v1.v1.iter() - } - - /// Set of binaries installed by a particular package. - /// Returns None if the package is not installed. - pub fn installed_bins(&self, pkg_id: PackageId) -> Option<&BTreeSet> { - self.v1.v1.get(&pkg_id) - } - - /// Remove a package from the tracker. - pub fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet) { - self.v1.remove(pkg_id, bins); - self.v2.remove(pkg_id, bins); - } -} - -impl CrateListingV1 { - fn mark_installed(&mut self, pkg: &Package, bins: &BTreeSet) { - // Remove bins from any other packages. - for other_bins in self.v1.values_mut() { - for bin in bins { - other_bins.remove(bin); - } - } - // Remove entries where `bins` is empty. - let to_remove = self - .v1 - .iter() - .filter_map(|(&p, set)| if set.is_empty() { Some(p) } else { None }) - .collect::>(); - for p in to_remove.iter() { - self.v1.remove(p); - } - // Add these bins. - self.v1 - .entry(pkg.package_id()) - .or_insert_with(BTreeSet::new) - .append(&mut bins.clone()); - } - - fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet) { - let mut installed = match self.v1.entry(pkg_id) { - btree_map::Entry::Occupied(e) => e, - btree_map::Entry::Vacant(..) 
=> panic!("v1 unexpected missing `{}`", pkg_id), - }; - - for bin in bins { - installed.get_mut().remove(bin); - } - if installed.get().is_empty() { - installed.remove(); - } - } - - fn save(&self, lock: &FileLock) -> CargoResult<()> { - let mut file = lock.file(); - file.seek(SeekFrom::Start(0))?; - file.set_len(0)?; - let data = toml::to_string(self)?; - file.write_all(data.as_bytes())?; - Ok(()) - } -} - -impl CrateListingV2 { - /// Incorporate any changes from v1 into self. - /// This handles the initial upgrade to v2, *and* handles the case - /// where v2 is in use, and a v1 update is made, then v2 is used again. - /// i.e., `cargo +new install foo ; cargo +old install bar ; cargo +new install bar` - /// For now, v1 is the source of truth, so its values are trusted over v2. - fn sync_v1(&mut self, v1: &CrateListingV1) { - // Make the `bins` entries the same. - for (pkg_id, bins) in &v1.v1 { - self.installs - .entry(*pkg_id) - .and_modify(|info| info.bins = bins.clone()) - .or_insert_with(|| InstallInfo::from_v1(bins)); - } - // Remove any packages that aren't present in v1. - let to_remove: Vec<_> = self - .installs - .keys() - .filter(|pkg_id| !v1.v1.contains_key(pkg_id)) - .cloned() - .collect(); - for pkg_id in to_remove { - self.installs.remove(&pkg_id); - } - } - - fn package_for_bin(&self, bin_name: &str) -> Option { - self.installs - .iter() - .find(|(_, info)| info.bins.contains(bin_name)) - .map(|(pkg_id, _)| *pkg_id) - } - - fn mark_installed( - &mut self, - pkg: &Package, - bins: &BTreeSet, - version_req: Option, - opts: &CompileOptions, - target: &str, - rustc: &str, - ) { - // Remove bins from any other packages. - for info in &mut self.installs.values_mut() { - for bin in bins { - info.bins.remove(bin); - } - } - // Remove entries where `bins` is empty. - let to_remove = self - .installs - .iter() - .filter_map(|(&p, info)| if info.bins.is_empty() { Some(p) } else { None }) - .collect::>(); - for p in to_remove.iter() { - self.installs.remove(p); - } - // Add these bins. - if let Some(info) = self.installs.get_mut(&pkg.package_id()) { - info.bins.append(&mut bins.clone()); - info.version_req = version_req; - info.features = feature_set(&opts.cli_features.features); - info.all_features = opts.cli_features.all_features; - info.no_default_features = !opts.cli_features.uses_default_features; - info.profile = opts.build_config.requested_profile.to_string(); - info.target = Some(target.to_string()); - info.rustc = Some(rustc.to_string()); - } else { - self.installs.insert( - pkg.package_id(), - InstallInfo { - version_req, - bins: bins.clone(), - features: feature_set(&opts.cli_features.features), - all_features: opts.cli_features.all_features, - no_default_features: !opts.cli_features.uses_default_features, - profile: opts.build_config.requested_profile.to_string(), - target: Some(target.to_string()), - rustc: Some(rustc.to_string()), - other: BTreeMap::new(), - }, - ); - } - } - - fn remove(&mut self, pkg_id: PackageId, bins: &BTreeSet) { - let mut info_entry = match self.installs.entry(pkg_id) { - btree_map::Entry::Occupied(e) => e, - btree_map::Entry::Vacant(..) 
=> panic!("v2 unexpected missing `{}`", pkg_id), - }; - - for bin in bins { - info_entry.get_mut().bins.remove(bin); - } - if info_entry.get().bins.is_empty() { - info_entry.remove(); - } - } - - fn save(&self, lock: &FileLock) -> CargoResult<()> { - let mut file = lock.file(); - file.seek(SeekFrom::Start(0))?; - file.set_len(0)?; - let data = serde_json::to_string(self)?; - file.write_all(data.as_bytes())?; - Ok(()) - } -} - -impl InstallInfo { - fn from_v1(set: &BTreeSet) -> InstallInfo { - InstallInfo { - version_req: None, - bins: set.clone(), - features: BTreeSet::new(), - all_features: false, - no_default_features: false, - profile: "release".to_string(), - target: None, - rustc: None, - other: BTreeMap::new(), - } - } - - /// Determine if this installation is "up to date", or if it needs to be reinstalled. - /// - /// This does not do Package/Source/Version checking. - fn is_up_to_date(&self, opts: &CompileOptions, target: &str, exes: &BTreeSet) -> bool { - self.features == feature_set(&opts.cli_features.features) - && self.all_features == opts.cli_features.all_features - && self.no_default_features != opts.cli_features.uses_default_features - && self.profile.as_str() == opts.build_config.requested_profile.as_str() - && (self.target.is_none() || self.target.as_deref() == Some(target)) - && &self.bins == exes - } -} - -/// Determines the root directory where installation is done. -pub fn resolve_root(flag: Option<&str>, config: &Config) -> CargoResult { - let config_root = config.get_path("install.root")?; - Ok(flag - .map(PathBuf::from) - .or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from)) - .or_else(move || config_root.map(|v| v.val)) - .map(Filesystem::new) - .unwrap_or_else(|| config.home().clone())) -} - -/// Determines the `PathSource` from a `SourceId`. -pub fn path_source(source_id: SourceId, config: &Config) -> CargoResult> { - let path = source_id - .url() - .to_file_path() - .map_err(|()| format_err!("path sources must have a valid path"))?; - Ok(PathSource::new(&path, source_id, config)) -} - -/// Gets a Package based on command-line requirements. 
-pub fn select_dep_pkg( - source: &mut T, - dep: Dependency, - config: &Config, - needs_update: bool, -) -> CargoResult -where - T: Source, -{ - // This operation may involve updating some sources or making a few queries - // which may involve frobbing caches, as a result make sure we synchronize - // with other global Cargos - let _lock = config.acquire_package_cache_lock()?; - - if needs_update { - source.update()?; - } - - let deps = source.query_vec(&dep)?; - match deps.iter().map(|p| p.package_id()).max() { - Some(pkgid) => { - let pkg = Box::new(source).download_now(pkgid, config)?; - Ok(pkg) - } - None => { - let is_yanked: bool = if dep.version_req().is_exact() { - let version: String = dep.version_req().to_string(); - PackageId::new(dep.package_name(), &version[1..], source.source_id()) - .map_or(false, |pkg_id| source.is_yanked(pkg_id).unwrap_or(false)) - } else { - false - }; - if is_yanked { - bail!( - "cannot install package `{}`, it has been yanked from {}", - dep.package_name(), - source.source_id() - ) - } else { - bail!( - "could not find `{}` in {} with version `{}`", - dep.package_name(), - source.source_id(), - dep.version_req(), - ) - } - } - } -} - -pub fn select_pkg( - source: &mut T, - dep: Option, - mut list_all: F, - config: &Config, -) -> CargoResult -where - T: Source, - F: FnMut(&mut T) -> CargoResult>, -{ - // This operation may involve updating some sources or making a few queries - // which may involve frobbing caches, as a result make sure we synchronize - // with other global Cargos - let _lock = config.acquire_package_cache_lock()?; - - source.update()?; - - return if let Some(dep) = dep { - select_dep_pkg(source, dep, config, false) - } else { - let candidates = list_all(source)?; - let binaries = candidates - .iter() - .filter(|cand| cand.targets().iter().filter(|t| t.is_bin()).count() > 0); - let examples = candidates - .iter() - .filter(|cand| cand.targets().iter().filter(|t| t.is_example()).count() > 0); - let pkg = match one(binaries, |v| multi_err("binaries", v))? { - Some(p) => p, - None => match one(examples, |v| multi_err("examples", v))? { - Some(p) => p, - None => bail!( - "no packages found with binaries or \ - examples" - ), - }, - }; - Ok(pkg.clone()) - }; - - fn multi_err(kind: &str, mut pkgs: Vec<&Package>) -> String { - pkgs.sort_unstable_by_key(|a| a.name()); - format!( - "multiple packages with {} found: {}. When installing a git repository, \ - cargo will always search the entire repo for any Cargo.toml. \ - Please specify which to install.", - kind, - pkgs.iter() - .map(|p| p.name().as_str()) - .collect::>() - .join(", ") - ) - } -} - -/// Get one element from the iterator. -/// Returns None if none left. -/// Returns error if there is more than one item in the iterator. -fn one(mut i: I, f: F) -> CargoResult> -where - I: Iterator, - F: FnOnce(Vec) -> String, -{ - match (i.next(), i.next()) { - (Some(i1), Some(i2)) => { - let mut v = vec![i1, i2]; - v.extend(i); - Err(format_err!("{}", f(v))) - } - (Some(i), None) => Ok(Some(i)), - (None, _) => Ok(None), - } -} - -/// Helper to convert features to a BTreeSet. -fn feature_set(features: &Rc>) -> BTreeSet { - features.iter().map(|s| s.to_string()).collect() -} - -/// Helper to get the executable names from a filter. -pub fn exe_names(pkg: &Package, filter: &ops::CompileFilter) -> BTreeSet { - let to_exe = |name| format!("{}{}", name, env::consts::EXE_SUFFIX); - match filter { - CompileFilter::Default { .. 
} => pkg - .targets() - .iter() - .filter(|t| t.is_bin()) - .map(|t| to_exe(t.name())) - .collect(), - CompileFilter::Only { - all_targets: true, .. - } => pkg - .targets() - .iter() - .filter(|target| target.is_executable()) - .map(|target| to_exe(target.name())) - .collect(), - CompileFilter::Only { - ref bins, - ref examples, - .. - } => { - let all_bins: Vec = bins.try_collect().unwrap_or_else(|| { - pkg.targets() - .iter() - .filter(|t| t.is_bin()) - .map(|t| t.name().to_string()) - .collect() - }); - let all_examples: Vec = examples.try_collect().unwrap_or_else(|| { - pkg.targets() - .iter() - .filter(|t| t.is_exe_example()) - .map(|t| t.name().to_string()) - .collect() - }); - - all_bins - .iter() - .chain(all_examples.iter()) - .map(|name| to_exe(name)) - .collect() - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/fix.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/fix.rs deleted file mode 100644 index 81b379bec..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/fix.rs +++ /dev/null @@ -1,927 +0,0 @@ -//! High-level overview of how `fix` works: -//! -//! The main goal is to run `cargo check` to get rustc to emit JSON -//! diagnostics with suggested fixes that can be applied to the files on the -//! filesystem, and validate that those changes didn't break anything. -//! -//! Cargo begins by launching a `LockServer` thread in the background to -//! listen for network connections to coordinate locking when multiple targets -//! are built simultaneously. It ensures each package has only one fix running -//! at once. -//! -//! The `RustfixDiagnosticServer` is launched in a background thread (in -//! `JobQueue`) to listen for network connections to coordinate displaying -//! messages to the user on the console (so that multiple processes don't try -//! to print at the same time). -//! -//! Cargo begins a normal `cargo check` operation with itself set as a proxy -//! for rustc by setting `primary_unit_rustc` in the build config. When -//! cargo launches rustc to check a crate, it is actually launching itself. -//! The `FIX_ENV` environment variable is set so that cargo knows it is in -//! fix-proxy-mode. -//! -//! Each proxied cargo-as-rustc detects it is in fix-proxy-mode (via `FIX_ENV` -//! environment variable in `main`) and does the following: -//! -//! - Acquire a lock from the `LockServer` from the master cargo process. -//! - Launches the real rustc (`rustfix_and_fix`), looking at the JSON output -//! for suggested fixes. -//! - Uses the `rustfix` crate to apply the suggestions to the files on the -//! file system. -//! - If rustfix fails to apply any suggestions (for example, they are -//! overlapping), but at least some suggestions succeeded, it will try the -//! previous two steps up to 4 times as long as some suggestions succeed. -//! - Assuming there's at least one suggestion applied, and the suggestions -//! applied cleanly, rustc is run again to verify the suggestions didn't -//! break anything. The change will be backed out if it fails (unless -//! `--broken-code` is used). -//! - If there are any warnings or errors, rustc will be run one last time to -//! show them to the user. 
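
As a rough illustration of the proxy-detection step described in the module comment above: the sketch below only shows the general shape of an entry point that checks the wrapper environment variable and, when it is set, forwards its arguments to the real rustc. It is a minimal sketch, not cargo's actual entry point (`fix_maybe_exec_rustc` below additionally acquires the lock server, applies rustfix suggestions, retries, and re-verifies the build); the constant name mirrors the `FIX_ENV` value defined in this file.

```rust
// Minimal sketch of fix-proxy detection; deliberately omits locking,
// rustfix application, and verification.
use std::env;
use std::process::{exit, Command};

// Same marker variable that `cargo fix` sets on the wrapper process.
const FIX_ENV: &str = "__CARGO_FIX_PLZ";

fn main() {
    if env::var_os(FIX_ENV).is_none() {
        // Not in fix-proxy mode: behave like a normal top-level invocation.
        return;
    }
    // In fix-proxy mode the first argument is the path to the real rustc;
    // forward everything else to it and propagate its exit status.
    let mut args = env::args_os().skip(1);
    let rustc = args.next().expect("expected rustc as first argument");
    let status = Command::new(rustc)
        .args(args)
        .status()
        .expect("failed to spawn rustc");
    exit(status.code().unwrap_or(1));
}
```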
- -use std::collections::{BTreeSet, HashMap, HashSet}; -use std::env; -use std::ffi::OsString; -use std::path::{Path, PathBuf}; -use std::process::{self, Command, ExitStatus}; -use std::str; - -use anyhow::{bail, Context, Error}; -use cargo_util::{exit_status_to_string, is_simple_exit_code, paths, ProcessBuilder}; -use log::{debug, trace, warn}; -use rustfix::diagnostics::Diagnostic; -use rustfix::{self, CodeFix}; -use semver::Version; - -use crate::core::compiler::RustcTargetData; -use crate::core::resolver::features::{DiffMap, FeatureOpts, FeatureResolver}; -use crate::core::resolver::{HasDevUnits, Resolve, ResolveBehavior}; -use crate::core::{Edition, MaybePackage, PackageId, Workspace}; -use crate::ops::resolve::WorkspaceResolve; -use crate::ops::{self, CompileOptions}; -use crate::util::diagnostic_server::{Message, RustfixDiagnosticServer}; -use crate::util::errors::CargoResult; -use crate::util::Config; -use crate::util::{existing_vcs_repo, LockServer, LockServerClient}; -use crate::{drop_eprint, drop_eprintln}; - -const FIX_ENV: &str = "__CARGO_FIX_PLZ"; -const BROKEN_CODE_ENV: &str = "__CARGO_FIX_BROKEN_CODE"; -const EDITION_ENV: &str = "__CARGO_FIX_EDITION"; -const IDIOMS_ENV: &str = "__CARGO_FIX_IDIOMS"; - -pub struct FixOptions { - pub edition: bool, - pub idioms: bool, - pub compile_opts: CompileOptions, - pub allow_dirty: bool, - pub allow_no_vcs: bool, - pub allow_staged: bool, - pub broken_code: bool, -} - -pub fn fix(ws: &Workspace<'_>, opts: &mut FixOptions) -> CargoResult<()> { - check_version_control(ws.config(), opts)?; - if opts.edition { - check_resolver_change(ws, opts)?; - } - - // Spin up our lock server, which our subprocesses will use to synchronize fixes. - let lock_server = LockServer::new()?; - let mut wrapper = ProcessBuilder::new(env::current_exe()?); - wrapper.env(FIX_ENV, lock_server.addr().to_string()); - let _started = lock_server.start()?; - - opts.compile_opts.build_config.force_rebuild = true; - - if opts.broken_code { - wrapper.env(BROKEN_CODE_ENV, "1"); - } - - if opts.edition { - wrapper.env(EDITION_ENV, "1"); - } - if opts.idioms { - wrapper.env(IDIOMS_ENV, "1"); - } - - *opts - .compile_opts - .build_config - .rustfix_diagnostic_server - .borrow_mut() = Some(RustfixDiagnosticServer::new()?); - - if let Some(server) = opts - .compile_opts - .build_config - .rustfix_diagnostic_server - .borrow() - .as_ref() - { - server.configure(&mut wrapper); - } - - let rustc = ws.config().load_global_rustc(Some(ws))?; - wrapper.arg(&rustc.path); - - // primary crates are compiled using a cargo subprocess to do extra work of applying fixes and - // repeating build until there are no more changes to be applied - opts.compile_opts.build_config.primary_unit_rustc = Some(wrapper); - - ops::compile(ws, &opts.compile_opts)?; - Ok(()) -} - -fn check_version_control(config: &Config, opts: &FixOptions) -> CargoResult<()> { - if opts.allow_no_vcs { - return Ok(()); - } - if !existing_vcs_repo(config.cwd(), config.cwd()) { - bail!( - "no VCS found for this package and `cargo fix` can potentially \ - perform destructive changes; if you'd like to suppress this \ - error pass `--allow-no-vcs`" - ) - } - - if opts.allow_dirty && opts.allow_staged { - return Ok(()); - } - - let mut dirty_files = Vec::new(); - let mut staged_files = Vec::new(); - if let Ok(repo) = git2::Repository::discover(config.cwd()) { - let mut repo_opts = git2::StatusOptions::new(); - repo_opts.include_ignored(false); - for status in repo.statuses(Some(&mut repo_opts))?.iter() { - if let Some(path) = 
status.path() { - match status.status() { - git2::Status::CURRENT => (), - git2::Status::INDEX_NEW - | git2::Status::INDEX_MODIFIED - | git2::Status::INDEX_DELETED - | git2::Status::INDEX_RENAMED - | git2::Status::INDEX_TYPECHANGE => { - if !opts.allow_staged { - staged_files.push(path.to_string()) - } - } - _ => { - if !opts.allow_dirty { - dirty_files.push(path.to_string()) - } - } - }; - } - } - } - - if dirty_files.is_empty() && staged_files.is_empty() { - return Ok(()); - } - - let mut files_list = String::new(); - for file in dirty_files { - files_list.push_str(" * "); - files_list.push_str(&file); - files_list.push_str(" (dirty)\n"); - } - for file in staged_files { - files_list.push_str(" * "); - files_list.push_str(&file); - files_list.push_str(" (staged)\n"); - } - - bail!( - "the working directory of this package has uncommitted changes, and \ - `cargo fix` can potentially perform destructive changes; if you'd \ - like to suppress this error pass `--allow-dirty`, `--allow-staged`, \ - or commit the changes to these files:\n\ - \n\ - {}\n\ - ", - files_list - ); -} - -fn check_resolver_change(ws: &Workspace<'_>, opts: &FixOptions) -> CargoResult<()> { - let root = ws.root_maybe(); - match root { - MaybePackage::Package(root_pkg) => { - if root_pkg.manifest().resolve_behavior().is_some() { - // If explicitly specified by the user, no need to check. - return Ok(()); - } - // Only trigger if updating the root package from 2018. - let pkgs = opts.compile_opts.spec.get_packages(ws)?; - if !pkgs.iter().any(|&pkg| pkg == root_pkg) { - // The root is not being migrated. - return Ok(()); - } - if root_pkg.manifest().edition() != Edition::Edition2018 { - // V1 to V2 only happens on 2018 to 2021. - return Ok(()); - } - } - MaybePackage::Virtual(_vm) => { - // Virtual workspaces don't have a global edition to set (yet). - return Ok(()); - } - } - // 2018 without `resolver` set must be V1 - assert_eq!(ws.resolve_behavior(), ResolveBehavior::V1); - let specs = opts.compile_opts.spec.to_package_id_specs(ws)?; - let target_data = RustcTargetData::new(ws, &opts.compile_opts.build_config.requested_kinds)?; - let resolve_differences = |has_dev_units| -> CargoResult<(WorkspaceResolve<'_>, DiffMap)> { - let ws_resolve = ops::resolve_ws_with_opts( - ws, - &target_data, - &opts.compile_opts.build_config.requested_kinds, - &opts.compile_opts.cli_features, - &specs, - has_dev_units, - crate::core::resolver::features::ForceAllTargets::No, - )?; - - let feature_opts = FeatureOpts::new_behavior(ResolveBehavior::V2, has_dev_units); - let v2_features = FeatureResolver::resolve( - ws, - &target_data, - &ws_resolve.targeted_resolve, - &ws_resolve.pkg_set, - &opts.compile_opts.cli_features, - &specs, - &opts.compile_opts.build_config.requested_kinds, - feature_opts, - )?; - - let diffs = v2_features.compare_legacy(&ws_resolve.resolved_features); - Ok((ws_resolve, diffs)) - }; - let (_, without_dev_diffs) = resolve_differences(HasDevUnits::No)?; - let (ws_resolve, mut with_dev_diffs) = resolve_differences(HasDevUnits::Yes)?; - if without_dev_diffs.is_empty() && with_dev_diffs.is_empty() { - // Nothing is different, nothing to report. - return Ok(()); - } - // Only display unique changes with dev-dependencies. 
- with_dev_diffs.retain(|k, vals| without_dev_diffs.get(k) != Some(vals)); - let config = ws.config(); - config.shell().note( - "Switching to Edition 2021 will enable the use of the version 2 feature resolver in Cargo.", - )?; - drop_eprintln!( - config, - "This may cause some dependencies to be built with fewer features enabled than previously." - ); - drop_eprintln!( - config, - "More information about the resolver changes may be found \ - at https://doc.rust-lang.org/nightly/edition-guide/rust-2021/default-cargo-resolver.html" - ); - drop_eprintln!( - config, - "When building the following dependencies, \ - the given features will no longer be used:\n" - ); - let show_diffs = |differences: DiffMap| { - for ((pkg_id, for_host), removed) in differences { - drop_eprint!(config, " {}", pkg_id); - if for_host { - drop_eprint!(config, " (as host dependency)"); - } - drop_eprint!(config, " removed features: "); - let joined: Vec<_> = removed.iter().map(|s| s.as_str()).collect(); - drop_eprintln!(config, "{}", joined.join(", ")); - } - drop_eprint!(config, "\n"); - }; - if !without_dev_diffs.is_empty() { - show_diffs(without_dev_diffs); - } - if !with_dev_diffs.is_empty() { - drop_eprintln!( - config, - "The following differences only apply when building with dev-dependencies:\n" - ); - show_diffs(with_dev_diffs); - } - report_maybe_diesel(config, &ws_resolve.targeted_resolve)?; - Ok(()) -} - -fn report_maybe_diesel(config: &Config, resolve: &Resolve) -> CargoResult<()> { - fn is_broken_diesel(pid: PackageId) -> bool { - pid.name() == "diesel" && pid.version() < &Version::new(1, 4, 8) - } - - fn is_broken_diesel_migration(pid: PackageId) -> bool { - pid.name() == "diesel_migrations" && pid.version().major <= 1 - } - - if resolve.iter().any(is_broken_diesel) && resolve.iter().any(is_broken_diesel_migration) { - config.shell().note( - "\ -This project appears to use both diesel and diesel_migrations. These packages have -a known issue where the build may fail due to the version 2 resolver preventing -feature unification between those two packages. Please update to at least diesel 1.4.8 -to prevent this issue from happening. -", - )?; - } - Ok(()) -} - -/// Entry point for `cargo` running as a proxy for `rustc`. -/// -/// This is called every time `cargo` is run to check if it is in proxy mode. -/// -/// Returns `false` if `fix` is not being run (not in proxy mode). Returns -/// `true` if in `fix` proxy mode, and the fix was complete without any -/// warnings or errors. If there are warnings or errors, this does not return, -/// and the process exits with the corresponding `rustc` exit code. -pub fn fix_maybe_exec_rustc(config: &Config) -> CargoResult { - let lock_addr = match env::var(FIX_ENV) { - Ok(s) => s, - Err(_) => return Ok(false), - }; - - let args = FixArgs::get()?; - trace!("cargo-fix as rustc got file {:?}", args.file); - - let workspace_rustc = std::env::var("RUSTC_WORKSPACE_WRAPPER") - .map(PathBuf::from) - .ok(); - let mut rustc = ProcessBuilder::new(&args.rustc).wrapped(workspace_rustc.as_ref()); - rustc.env_remove(FIX_ENV); - - trace!("start rustfixing {:?}", args.file); - let fixes = rustfix_crate(&lock_addr, &rustc, &args.file, &args, config)?; - - // Ok now we have our final goal of testing out the changes that we applied. - // If these changes went awry and actually started to cause the crate to - // *stop* compiling then we want to back them out and continue to print - // warnings to the user. 
- // - // If we didn't actually make any changes then we can immediately execute the - // new rustc, and otherwise we capture the output to hide it in the scenario - // that we have to back it all out. - if !fixes.files.is_empty() { - let mut cmd = rustc.build_command(); - args.apply(&mut cmd); - cmd.arg("--error-format=json"); - debug!("calling rustc for final verification: {:?}", cmd); - let output = cmd.output().context("failed to spawn rustc")?; - - if output.status.success() { - for (path, file) in fixes.files.iter() { - Message::Fixed { - file: path.clone(), - fixes: file.fixes_applied, - } - .post()?; - } - } - - // If we succeeded then we'll want to commit to the changes we made, if - // any. If stderr is empty then there's no need for the final exec at - // the end, we just bail out here. - if output.status.success() && output.stderr.is_empty() { - return Ok(true); - } - - // Otherwise, if our rustc just failed, then that means that we broke the - // user's code with our changes. Back out everything and fall through - // below to recompile again. - if !output.status.success() { - if env::var_os(BROKEN_CODE_ENV).is_none() { - for (path, file) in fixes.files.iter() { - debug!("reverting {:?} due to errors", path); - paths::write(path, &file.original_code)?; - } - } - log_failed_fix(&output.stderr, output.status)?; - } - } - - // This final fall-through handles multiple cases; - // - If the fix failed, show the original warnings and suggestions. - // - If `--broken-code`, show the error messages. - // - If the fix succeeded, show any remaining warnings. - let mut cmd = rustc.build_command(); - args.apply(&mut cmd); - for arg in args.format_args { - // Add any json/error format arguments that Cargo wants. This allows - // things like colored output to work correctly. - cmd.arg(arg); - } - debug!("calling rustc to display remaining diagnostics: {:?}", cmd); - exit_with(cmd.status().context("failed to spawn rustc")?); -} - -#[derive(Default)] -struct FixedCrate { - files: HashMap, -} - -struct FixedFile { - errors_applying_fixes: Vec, - fixes_applied: u32, - original_code: String, -} - -/// Attempts to apply fixes to a single crate. -/// -/// This runs `rustc` (possibly multiple times) to gather suggestions from the -/// compiler and applies them to the files on disk. -fn rustfix_crate( - lock_addr: &str, - rustc: &ProcessBuilder, - filename: &Path, - args: &FixArgs, - config: &Config, -) -> Result { - if !args.can_run_rustfix(config)? { - // This fix should not be run. Skipping... - return Ok(FixedCrate::default()); - } - - // First up, we want to make sure that each crate is only checked by one - // process at a time. If two invocations concurrently check a crate then - // it's likely to corrupt it. - // - // Historically this used per-source-file locking, then per-package - // locking. It now uses a single, global lock as some users do things like - // #[path] or include!() of shared files between packages. Serializing - // makes it slower, but is the only safe way to prevent concurrent - // modification. - let _lock = LockServerClient::lock(&lock_addr.parse()?, "global")?; - - // Next up, this is a bit suspicious, but we *iteratively* execute rustc and - // collect suggestions to feed to rustfix. Once we hit our limit of times to - // execute rustc or we appear to be reaching a fixed point we stop running - // rustc. - // - // This is currently done to handle code like: - // - // ::foo::<::Bar>(); - // - // where there are two fixes to happen here: `crate::foo::()`. 
- // The spans for these two suggestions are overlapping and its difficult in - // the compiler to **not** have overlapping spans here. As a result, a naive - // implementation would feed the two compiler suggestions for the above fix - // into `rustfix`, but one would be rejected because it overlaps with the - // other. - // - // In this case though, both suggestions are valid and can be automatically - // applied! To handle this case we execute rustc multiple times, collecting - // fixes each time we do so. Along the way we discard any suggestions that - // failed to apply, assuming that they can be fixed the next time we run - // rustc. - // - // Naturally, we want a few protections in place here though to avoid looping - // forever or otherwise losing data. To that end we have a few termination - // conditions: - // - // * Do this whole process a fixed number of times. In theory we probably - // need an infinite number of times to apply fixes, but we're not gonna - // sit around waiting for that. - // * If it looks like a fix genuinely can't be applied we need to bail out. - // Detect this when a fix fails to get applied *and* no suggestions - // successfully applied to the same file. In that case looks like we - // definitely can't make progress, so bail out. - let mut fixes = FixedCrate::default(); - let mut last_fix_counts = HashMap::new(); - let iterations = env::var("CARGO_FIX_MAX_RETRIES") - .ok() - .and_then(|n| n.parse().ok()) - .unwrap_or(4); - for _ in 0..iterations { - last_fix_counts.clear(); - for (path, file) in fixes.files.iter_mut() { - last_fix_counts.insert(path.clone(), file.fixes_applied); - // We'll generate new errors below. - file.errors_applying_fixes.clear(); - } - rustfix_and_fix(&mut fixes, rustc, filename, args, config)?; - let mut progress_yet_to_be_made = false; - for (path, file) in fixes.files.iter_mut() { - if file.errors_applying_fixes.is_empty() { - continue; - } - // If anything was successfully fixed *and* there's at least one - // error, then assume the error was spurious and we'll try again on - // the next iteration. - if file.fixes_applied != *last_fix_counts.get(path).unwrap_or(&0) { - progress_yet_to_be_made = true; - } - } - if !progress_yet_to_be_made { - break; - } - } - - // Any errors still remaining at this point need to be reported as probably - // bugs in Cargo and/or rustfix. - for (path, file) in fixes.files.iter_mut() { - for error in file.errors_applying_fixes.drain(..) { - Message::ReplaceFailed { - file: path.clone(), - message: error, - } - .post()?; - } - } - - Ok(fixes) -} - -/// Executes `rustc` to apply one round of suggestions to the crate in question. -/// -/// This will fill in the `fixes` map with original code, suggestions applied, -/// and any errors encountered while fixing files. -fn rustfix_and_fix( - fixes: &mut FixedCrate, - rustc: &ProcessBuilder, - filename: &Path, - args: &FixArgs, - config: &Config, -) -> Result<(), Error> { - // If not empty, filter by these lints. - // TODO: implement a way to specify this. - let only = HashSet::new(); - - let mut cmd = rustc.build_command(); - cmd.arg("--error-format=json"); - args.apply(&mut cmd); - debug!( - "calling rustc to collect suggestions and validate previous fixes: {:?}", - cmd - ); - let output = cmd.output().with_context(|| { - format!( - "failed to execute `{}`", - rustc.get_program().to_string_lossy() - ) - })?; - - // If rustc didn't succeed for whatever reasons then we're very likely to be - // looking at otherwise broken code. 
Let's not make things accidentally - // worse by applying fixes where a bug could cause *more* broken code. - // Instead, punt upwards which will reexec rustc over the original code, - // displaying pretty versions of the diagnostics we just read out. - if !output.status.success() && env::var_os(BROKEN_CODE_ENV).is_none() { - debug!( - "rustfixing `{:?}` failed, rustc exited with {:?}", - filename, - output.status.code() - ); - return Ok(()); - } - - let fix_mode = env::var_os("__CARGO_FIX_YOLO") - .map(|_| rustfix::Filter::Everything) - .unwrap_or(rustfix::Filter::MachineApplicableOnly); - - // Sift through the output of the compiler to look for JSON messages. - // indicating fixes that we can apply. - let stderr = str::from_utf8(&output.stderr).context("failed to parse rustc stderr as UTF-8")?; - - let suggestions = stderr - .lines() - .filter(|x| !x.is_empty()) - .inspect(|y| trace!("line: {}", y)) - // Parse each line of stderr, ignoring errors, as they may not all be JSON. - .filter_map(|line| serde_json::from_str::(line).ok()) - // From each diagnostic, try to extract suggestions from rustc. - .filter_map(|diag| rustfix::collect_suggestions(&diag, &only, fix_mode)); - - // Collect suggestions by file so we can apply them one at a time later. - let mut file_map = HashMap::new(); - let mut num_suggestion = 0; - // It's safe since we won't read any content under home dir. - let home_path = config.home().as_path_unlocked(); - for suggestion in suggestions { - trace!("suggestion"); - // Make sure we've got a file associated with this suggestion and all - // snippets point to the same file. Right now it's not clear what - // we would do with multiple files. - let file_names = suggestion - .solutions - .iter() - .flat_map(|s| s.replacements.iter()) - .map(|r| &r.snippet.file_name); - - let file_name = if let Some(file_name) = file_names.clone().next() { - file_name.clone() - } else { - trace!("rejecting as it has no solutions {:?}", suggestion); - continue; - }; - - // Do not write into registry cache. See rust-lang/cargo#9857. - if Path::new(&file_name).starts_with(home_path) { - continue; - } - - if !file_names.clone().all(|f| f == &file_name) { - trace!("rejecting as it changes multiple files: {:?}", suggestion); - continue; - } - - trace!("adding suggestion for {:?}: {:?}", file_name, suggestion); - file_map - .entry(file_name) - .or_insert_with(Vec::new) - .push(suggestion); - num_suggestion += 1; - } - - debug!( - "collected {} suggestions for `{}`", - num_suggestion, - filename.display(), - ); - - for (file, suggestions) in file_map { - // Attempt to read the source code for this file. If this fails then - // that'd be pretty surprising, so log a message and otherwise keep - // going. - let code = match paths::read(file.as_ref()) { - Ok(s) => s, - Err(e) => { - warn!("failed to read `{}`: {}", file, e); - continue; - } - }; - let num_suggestions = suggestions.len(); - debug!("applying {} fixes to {}", num_suggestions, file); - - // If this file doesn't already exist then we just read the original - // code, so save it. If the file already exists then the original code - // doesn't need to be updated as we've just read an interim state with - // some fixes but perhaps not all. 
- let fixed_file = fixes - .files - .entry(file.clone()) - .or_insert_with(|| FixedFile { - errors_applying_fixes: Vec::new(), - fixes_applied: 0, - original_code: code.clone(), - }); - let mut fixed = CodeFix::new(&code); - - // As mentioned above in `rustfix_crate`, we don't immediately warn - // about suggestions that fail to apply here, and instead we save them - // off for later processing. - for suggestion in suggestions.iter().rev() { - match fixed.apply(suggestion) { - Ok(()) => fixed_file.fixes_applied += 1, - Err(e) => fixed_file.errors_applying_fixes.push(e.to_string()), - } - } - let new_code = fixed.finish()?; - paths::write(&file, new_code)?; - } - - Ok(()) -} - -fn exit_with(status: ExitStatus) -> ! { - #[cfg(unix)] - { - use std::io::Write; - use std::os::unix::prelude::*; - if let Some(signal) = status.signal() { - drop(writeln!( - std::io::stderr().lock(), - "child failed with signal `{}`", - signal - )); - process::exit(2); - } - } - process::exit(status.code().unwrap_or(3)); -} - -fn log_failed_fix(stderr: &[u8], status: ExitStatus) -> Result<(), Error> { - let stderr = str::from_utf8(stderr).context("failed to parse rustc stderr as utf-8")?; - - let diagnostics = stderr - .lines() - .filter(|x| !x.is_empty()) - .filter_map(|line| serde_json::from_str::(line).ok()); - let mut files = BTreeSet::new(); - let mut errors = Vec::new(); - for diagnostic in diagnostics { - errors.push(diagnostic.rendered.unwrap_or(diagnostic.message)); - for span in diagnostic.spans.into_iter() { - files.insert(span.file_name); - } - } - // Include any abnormal messages (like an ICE or whatever). - errors.extend( - stderr - .lines() - .filter(|x| !x.starts_with('{')) - .map(|x| x.to_string()), - ); - let mut krate = None; - let mut prev_dash_dash_krate_name = false; - for arg in env::args() { - if prev_dash_dash_krate_name { - krate = Some(arg.clone()); - } - - if arg == "--crate-name" { - prev_dash_dash_krate_name = true; - } else { - prev_dash_dash_krate_name = false; - } - } - - let files = files.into_iter().collect(); - let abnormal_exit = if status.code().map_or(false, is_simple_exit_code) { - None - } else { - Some(exit_status_to_string(status)) - }; - Message::FixFailed { - files, - krate, - errors, - abnormal_exit, - } - .post()?; - - Ok(()) -} - -/// Various command-line options and settings used when `cargo` is running as -/// a proxy for `rustc` during the fix operation. -struct FixArgs { - /// This is the `.rs` file that is being fixed. - file: PathBuf, - /// If `--edition` is used to migrate to the next edition, this is the - /// edition we are migrating towards. - prepare_for_edition: Option, - /// `true` if `--edition-idioms` is enabled. - idioms: bool, - /// The current edition. - /// - /// `None` if on 2015. - enabled_edition: Option, - /// Other command-line arguments not reflected by other fields in - /// `FixArgs`. - other: Vec, - /// Path to the `rustc` executable. - rustc: PathBuf, - /// Console output flags (`--error-format`, `--json`, etc.). - /// - /// The normal fix procedure always uses `--json`, so it overrides what - /// Cargo normally passes when applying fixes. When displaying warnings or - /// errors, it will use these flags. 
- format_args: Vec, -} - -impl FixArgs { - fn get() -> Result { - let rustc = env::args_os() - .nth(1) - .map(PathBuf::from) - .ok_or_else(|| anyhow::anyhow!("expected rustc as first argument"))?; - let mut file = None; - let mut enabled_edition = None; - let mut other = Vec::new(); - let mut format_args = Vec::new(); - - for arg in env::args_os().skip(2) { - let path = PathBuf::from(arg); - if path.extension().and_then(|s| s.to_str()) == Some("rs") && path.exists() { - file = Some(path); - continue; - } - if let Some(s) = path.to_str() { - if let Some(edition) = s.strip_prefix("--edition=") { - enabled_edition = Some(edition.parse()?); - continue; - } - if s.starts_with("--error-format=") || s.starts_with("--json=") { - // Cargo may add error-format in some cases, but `cargo - // fix` wants to add its own. - format_args.push(s.to_string()); - continue; - } - } - other.push(path.into()); - } - let file = file.ok_or_else(|| anyhow::anyhow!("could not find .rs file in rustc args"))?; - let idioms = env::var(IDIOMS_ENV).is_ok(); - - let prepare_for_edition = env::var(EDITION_ENV).ok().map(|_| { - enabled_edition - .unwrap_or(Edition::Edition2015) - .saturating_next() - }); - - Ok(FixArgs { - file, - prepare_for_edition, - idioms, - enabled_edition, - other, - rustc, - format_args, - }) - } - - fn apply(&self, cmd: &mut Command) { - cmd.arg(&self.file); - cmd.args(&self.other); - if self.prepare_for_edition.is_some() { - // When migrating an edition, we don't want to fix other lints as - // they can sometimes add suggestions that fail to apply, causing - // the entire migration to fail. But those lints aren't needed to - // migrate. - cmd.arg("--cap-lints=allow"); - } else { - // This allows `cargo fix` to work even if the crate has #[deny(warnings)]. - cmd.arg("--cap-lints=warn"); - } - if let Some(edition) = self.enabled_edition { - cmd.arg("--edition").arg(edition.to_string()); - if self.idioms && edition.supports_idiom_lint() { - cmd.arg(format!("-Wrust-{}-idioms", edition)); - } - } - - if let Some(edition) = self.prepare_for_edition { - if edition.supports_compat_lint() { - cmd.arg("--force-warn") - .arg(format!("rust-{}-compatibility", edition)); - } - } - } - - /// Validates the edition, and sends a message indicating what is being - /// done. Returns a flag indicating whether this fix should be run. - fn can_run_rustfix(&self, config: &Config) -> CargoResult { - let to_edition = match self.prepare_for_edition { - Some(s) => s, - None => { - return Message::Fixing { - file: self.file.display().to_string(), - } - .post() - .and(Ok(true)); - } - }; - // Unfortunately determining which cargo targets are being built - // isn't easy, and each target can be a different edition. The - // cargo-as-rustc fix wrapper doesn't know anything about the - // workspace, so it can't check for the `cargo-features` unstable - // opt-in. As a compromise, this just restricts to the nightly - // toolchain. - // - // Unfortunately this results in a pretty poor error message when - // multiple jobs run in parallel (the error appears multiple - // times). Hopefully this doesn't happen often in practice. 
- if !to_edition.is_stable() && !config.nightly_features_allowed { - let message = format!( - "`{file}` is on the latest edition, but trying to \ - migrate to edition {to_edition}.\n\ - Edition {to_edition} is unstable and not allowed in \ - this release, consider trying the nightly release channel.", - file = self.file.display(), - to_edition = to_edition - ); - return Message::EditionAlreadyEnabled { - message, - edition: to_edition.previous().unwrap(), - } - .post() - .and(Ok(false)); // Do not run rustfix for this the edition. - } - let from_edition = self.enabled_edition.unwrap_or(Edition::Edition2015); - if from_edition == to_edition { - let message = format!( - "`{}` is already on the latest edition ({}), \ - unable to migrate further", - self.file.display(), - to_edition - ); - Message::EditionAlreadyEnabled { - message, - edition: to_edition, - } - .post() - } else { - Message::Migrating { - file: self.file.display().to_string(), - from_edition, - to_edition, - } - .post() - } - .and(Ok(true)) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/lockfile.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/lockfile.rs deleted file mode 100644 index ec0255ff4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/lockfile.rs +++ /dev/null @@ -1,220 +0,0 @@ -use std::io::prelude::*; - -use crate::core::{resolver, Resolve, ResolveVersion, Workspace}; -use crate::util::errors::CargoResult; -use crate::util::toml as cargo_toml; -use crate::util::Filesystem; - -use anyhow::Context as _; - -pub fn load_pkg_lockfile(ws: &Workspace<'_>) -> CargoResult> { - if !ws.root().join("Cargo.lock").exists() { - return Ok(None); - } - - let root = Filesystem::new(ws.root().to_path_buf()); - let mut f = root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file")?; - - let mut s = String::new(); - f.read_to_string(&mut s) - .with_context(|| format!("failed to read file: {}", f.path().display()))?; - - let resolve = (|| -> CargoResult> { - let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?; - let v: resolver::EncodableResolve = resolve.try_into()?; - Ok(Some(v.into_resolve(&s, ws)?)) - })() - .with_context(|| format!("failed to parse lock file at: {}", f.path().display()))?; - Ok(resolve) -} - -/// Generate a toml String of Cargo.lock from a Resolve. -pub fn resolve_to_string(ws: &Workspace<'_>, resolve: &mut Resolve) -> CargoResult { - let (_orig, out, _ws_root) = resolve_to_string_orig(ws, resolve); - Ok(out) -} - -pub fn write_pkg_lockfile(ws: &Workspace<'_>, resolve: &mut Resolve) -> CargoResult<()> { - let (orig, mut out, ws_root) = resolve_to_string_orig(ws, resolve); - - // If the lock file contents haven't changed so don't rewrite it. This is - // helpful on read-only filesystems. - if let Some(orig) = &orig { - if are_equal_lockfiles(orig, &out, ws) { - return Ok(()); - } - } - - if !ws.config().lock_update_allowed() { - let flag = if ws.config().network_allowed() { - "--locked" - } else { - "--frozen" - }; - anyhow::bail!( - "the lock file {} needs to be updated but {} was passed to prevent this\n\ - If you want to try to generate the lock file without accessing the network, \ - remove the {} flag and use --offline instead.", - ws.root().to_path_buf().join("Cargo.lock").display(), - flag, - flag - ); - } - - // While we're updating the lock file anyway go ahead and update its - // encoding to whatever the latest default is. 
That way we can slowly roll - // out lock file updates as they're otherwise already updated, and changes - // which don't touch dependencies won't seemingly spuriously update the lock - // file. - if resolve.version() < ResolveVersion::default() { - resolve.set_version(ResolveVersion::default()); - out = serialize_resolve(resolve, orig.as_deref()); - } - - // Ok, if that didn't work just write it out - ws_root - .open_rw("Cargo.lock", ws.config(), "Cargo.lock file") - .and_then(|mut f| { - f.file().set_len(0)?; - f.write_all(out.as_bytes())?; - Ok(()) - }) - .with_context(|| format!("failed to write {}", ws.root().join("Cargo.lock").display()))?; - Ok(()) -} - -fn resolve_to_string_orig( - ws: &Workspace<'_>, - resolve: &mut Resolve, -) -> (Option, String, Filesystem) { - // Load the original lock file if it exists. - let ws_root = Filesystem::new(ws.root().to_path_buf()); - let orig = ws_root.open_ro("Cargo.lock", ws.config(), "Cargo.lock file"); - let orig = orig.and_then(|mut f| { - let mut s = String::new(); - f.read_to_string(&mut s)?; - Ok(s) - }); - let out = serialize_resolve(resolve, orig.as_deref().ok()); - (orig.ok(), out, ws_root) -} - -fn serialize_resolve(resolve: &Resolve, orig: Option<&str>) -> String { - let toml = toml::Value::try_from(resolve).unwrap(); - - let mut out = String::new(); - - // At the start of the file we notify the reader that the file is generated. - // Specifically Phabricator ignores files containing "@generated", so we use that. - let marker_line = "# This file is automatically @generated by Cargo."; - let extra_line = "# It is not intended for manual editing."; - out.push_str(marker_line); - out.push('\n'); - out.push_str(extra_line); - out.push('\n'); - // and preserve any other top comments - if let Some(orig) = orig { - let mut comments = orig.lines().take_while(|line| line.starts_with('#')); - if let Some(first) = comments.next() { - if first != marker_line { - out.push_str(first); - out.push('\n'); - } - if let Some(second) = comments.next() { - if second != extra_line { - out.push_str(second); - out.push('\n'); - } - for line in comments { - out.push_str(line); - out.push('\n'); - } - } - } - } - - if let Some(version) = toml.get("version") { - out.push_str(&format!("version = {}\n\n", version)); - } - - let deps = toml["package"].as_array().unwrap(); - for dep in deps { - let dep = dep.as_table().unwrap(); - - out.push_str("[[package]]\n"); - emit_package(dep, &mut out); - } - - if let Some(patch) = toml.get("patch") { - let list = patch["unused"].as_array().unwrap(); - for entry in list { - out.push_str("[[patch.unused]]\n"); - emit_package(entry.as_table().unwrap(), &mut out); - out.push('\n'); - } - } - - if let Some(meta) = toml.get("metadata") { - out.push_str("[metadata]\n"); - out.push_str(&meta.to_string()); - } - - // Historical versions of Cargo in the old format accidentally left trailing - // blank newlines at the end of files, so we just leave that as-is. For all - // encodings going forward, though, we want to be sure that our encoded lock - // file doesn't contain any trailing newlines so trim out the extra if - // necessary. - if resolve.version() >= ResolveVersion::V2 { - while out.ends_with("\n\n") { - out.pop(); - } - } - out -} - -fn are_equal_lockfiles(orig: &str, current: &str, ws: &Workspace<'_>) -> bool { - // If we want to try and avoid updating the lock file, parse both and - // compare them; since this is somewhat expensive, don't do it in the - // common case where we can update lock files. 
- if !ws.config().lock_update_allowed() { - let res: CargoResult = (|| { - let old: resolver::EncodableResolve = toml::from_str(orig)?; - let new: resolver::EncodableResolve = toml::from_str(current)?; - Ok(old.into_resolve(orig, ws)? == new.into_resolve(current, ws)?) - })(); - if let Ok(true) = res { - return true; - } - } - - orig.lines().eq(current.lines()) -} - -fn emit_package(dep: &toml::value::Table, out: &mut String) { - out.push_str(&format!("name = {}\n", &dep["name"])); - out.push_str(&format!("version = {}\n", &dep["version"])); - - if dep.contains_key("source") { - out.push_str(&format!("source = {}\n", &dep["source"])); - } - if dep.contains_key("checksum") { - out.push_str(&format!("checksum = {}\n", &dep["checksum"])); - } - - if let Some(s) = dep.get("dependencies") { - let slice = s.as_array().unwrap(); - - if !slice.is_empty() { - out.push_str("dependencies = [\n"); - - for child in slice.iter() { - out.push_str(&format!(" {},\n", child)); - } - - out.push_str("]\n"); - } - out.push('\n'); - } else if dep.contains_key("replace") { - out.push_str(&format!("replace = {}\n\n", &dep["replace"])); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/mod.rs deleted file mode 100644 index e81486f3d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/mod.rs +++ /dev/null @@ -1,88 +0,0 @@ -use crate::sources::CRATES_IO_DOMAIN; - -pub use self::cargo_clean::{clean, CleanOptions}; -pub use self::cargo_compile::{ - compile, compile_with_exec, compile_ws, create_bcx, print, resolve_all_features, CompileOptions, -}; -pub use self::cargo_compile::{CompileFilter, FilterRule, LibRule, Packages}; -pub use self::cargo_doc::{doc, DocOptions}; -pub use self::cargo_fetch::{fetch, FetchOptions}; -pub use self::cargo_generate_lockfile::generate_lockfile; -pub use self::cargo_generate_lockfile::update_lockfile; -pub use self::cargo_generate_lockfile::UpdateOptions; -pub use self::cargo_install::{install, install_list}; -pub use self::cargo_new::{init, new, NewOptions, VersionControl}; -pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions}; -pub use self::cargo_package::{package, package_one, PackageOpts}; -pub use self::cargo_pkgid::pkgid; -pub use self::cargo_read_manifest::{read_package, read_packages}; -pub use self::cargo_run::run; -pub use self::cargo_test::{run_benches, run_tests, TestOptions}; -pub use self::cargo_uninstall::uninstall; -pub use self::fix::{fix, fix_maybe_exec_rustc, FixOptions}; -pub use self::lockfile::{load_pkg_lockfile, resolve_to_string, write_pkg_lockfile}; -pub use self::registry::HttpTimeout; -pub use self::registry::{configure_http_handle, http_handle, http_handle_and_timeout}; -pub use self::registry::{modify_owners, yank, OwnersOptions, PublishOpts}; -pub use self::registry::{needs_custom_http_transport, registry_login, registry_logout, search}; -pub use self::registry::{publish, registry_configuration, RegistryConfig}; -pub use self::resolve::{ - add_overrides, get_resolved_packages, resolve_with_previous, resolve_ws, resolve_ws_with_opts, - WorkspaceResolve, -}; -pub use self::vendor::{vendor, VendorOptions}; - -mod cargo_clean; -mod cargo_compile; -pub mod cargo_config; -mod cargo_doc; -mod cargo_fetch; -mod cargo_generate_lockfile; -mod cargo_install; -mod cargo_new; -mod cargo_output_metadata; -mod cargo_package; -mod cargo_pkgid; -mod cargo_read_manifest; -mod cargo_run; -mod cargo_test; -mod cargo_uninstall; 
-mod common_for_install_and_uninstall; -mod fix; -mod lockfile; -mod registry; -mod resolve; -pub mod tree; -mod vendor; - -/// Returns true if the dependency is either git or path, false otherwise -/// Error if a git/path dep is transitive, but has no version (registry source). -/// This check is performed on dependencies before publishing or packaging -fn check_dep_has_version(dep: &crate::core::Dependency, publish: bool) -> crate::CargoResult { - let which = if dep.source_id().is_path() { - "path" - } else if dep.source_id().is_git() { - "git" - } else { - return Ok(false); - }; - - if !dep.specified_req() && dep.is_transitive() { - let dep_version_source = dep.registry_id().map_or_else( - || CRATES_IO_DOMAIN.to_string(), - |registry_id| registry_id.display_registry_name(), - ); - anyhow::bail!( - "all dependencies must have a version specified when {}.\n\ - dependency `{}` does not specify a version\n\ - Note: The {} dependency will use the version from {},\n\ - the `{}` specification will be removed from the dependency declaration.", - if publish { "publishing" } else { "packaging" }, - dep.package_name(), - if publish { "published" } else { "packaged" }, - dep_version_source, - which, - ) - } - Ok(true) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/registry.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/registry.rs deleted file mode 100644 index fe0a94692..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/registry.rs +++ /dev/null @@ -1,993 +0,0 @@ -use std::collections::{BTreeMap, HashSet}; -use std::fs::File; -use std::io::{self, BufRead}; -use std::iter::repeat; -use std::path::PathBuf; -use std::str; -use std::time::Duration; -use std::{cmp, env}; - -use anyhow::{bail, format_err, Context as _}; -use cargo_util::paths; -use crates_io::{self, NewCrate, NewCrateDependency, Registry}; -use curl::easy::{Easy, InfoType, SslOpt, SslVersion}; -use log::{log, Level}; -use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; - -use crate::core::dependency::DepKind; -use crate::core::manifest::ManifestMetadata; -use crate::core::resolver::CliFeatures; -use crate::core::source::Source; -use crate::core::{Package, SourceId, Workspace}; -use crate::ops; -use crate::sources::{RegistrySource, SourceConfigMap, CRATES_IO_DOMAIN, CRATES_IO_REGISTRY}; -use crate::util::config::{self, Config, SslVersionConfig, SslVersionConfigRange}; -use crate::util::errors::CargoResult; -use crate::util::important_paths::find_root_manifest_for_wd; -use crate::util::validate_package_name; -use crate::util::IntoUrl; -use crate::{drop_print, drop_println, version}; - -mod auth; - -/// Registry settings loaded from config files. -/// -/// This is loaded based on the `--registry` flag and the config settings. -#[derive(Debug)] -pub struct RegistryConfig { - /// The index URL. If `None`, use crates.io. - pub index: Option, - /// The authentication token. - pub token: Option, - /// Process used for fetching a token. 
- pub credential_process: Option<(PathBuf, Vec)>, -} - -pub struct PublishOpts<'cfg> { - pub config: &'cfg Config, - pub token: Option, - pub index: Option, - pub verify: bool, - pub allow_dirty: bool, - pub jobs: Option, - pub to_publish: ops::Packages, - pub targets: Vec, - pub dry_run: bool, - pub registry: Option, - pub cli_features: CliFeatures, -} - -pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { - let specs = opts.to_publish.to_package_id_specs(ws)?; - let mut pkgs = ws.members_with_features(&specs, &opts.cli_features)?; - - let (pkg, cli_features) = pkgs.pop().unwrap(); - - let mut publish_registry = opts.registry.clone(); - if let Some(ref allowed_registries) = *pkg.publish() { - if publish_registry.is_none() && allowed_registries.len() == 1 { - // If there is only one allowed registry, push to that one directly, - // even though there is no registry specified in the command. - let default_registry = &allowed_registries[0]; - if default_registry != CRATES_IO_REGISTRY { - // Don't change the registry for crates.io and don't warn the user. - // crates.io will be defaulted even without this. - opts.config.shell().note(&format!( - "Found `{}` as only allowed registry. Publishing to it automatically.", - default_registry - ))?; - publish_registry = Some(default_registry.clone()); - } - } - - let reg_name = publish_registry - .clone() - .unwrap_or_else(|| CRATES_IO_REGISTRY.to_string()); - if !allowed_registries.contains(®_name) { - bail!( - "`{}` cannot be published.\n\ - The registry `{}` is not listed in the `publish` value in Cargo.toml.", - pkg.name(), - reg_name - ); - } - } - - let (mut registry, _reg_cfg, reg_id) = registry( - opts.config, - opts.token.clone(), - opts.index.clone(), - publish_registry, - true, - !opts.dry_run, - )?; - verify_dependencies(pkg, ®istry, reg_id)?; - - // Prepare a tarball, with a non-suppressible warning if metadata - // is missing since this is being put online. - let tarball = ops::package_one( - ws, - pkg, - &ops::PackageOpts { - config: opts.config, - verify: opts.verify, - list: false, - check_metadata: true, - allow_dirty: opts.allow_dirty, - to_package: ops::Packages::Default, - targets: opts.targets.clone(), - jobs: opts.jobs, - cli_features: cli_features, - }, - )? - .unwrap(); - - opts.config - .shell() - .status("Uploading", pkg.package_id().to_string())?; - transmit( - opts.config, - pkg, - tarball.file(), - &mut registry, - reg_id, - opts.dry_run, - )?; - - Ok(()) -} - -fn verify_dependencies( - pkg: &Package, - registry: &Registry, - registry_src: SourceId, -) -> CargoResult<()> { - for dep in pkg.dependencies().iter() { - if super::check_dep_has_version(dep, true)? { - continue; - } - // TomlManifest::prepare_for_publish will rewrite the dependency - // to be just the `version` field. - if dep.source_id() != registry_src { - if !dep.source_id().is_registry() { - // Consider making SourceId::kind a public type that we can - // exhaustively match on. Using match can help ensure that - // every kind is properly handled. - panic!("unexpected source kind for dependency {:?}", dep); - } - // Block requests to send to crates.io with alt-registry deps. - // This extra hostname check is mostly to assist with testing, - // but also prevents someone using `--index` to specify - // something that points to crates.io. - if registry_src.is_default_registry() || registry.host_is_crates_io() { - bail!("crates cannot be published to crates.io with dependencies sourced from other\n\ - registries. 
`{}` needs to be published to crates.io before publishing this crate.\n\ - (crate `{}` is pulled from {})", - dep.package_name(), - dep.package_name(), - dep.source_id()); - } - } - } - Ok(()) -} - -fn transmit( - config: &Config, - pkg: &Package, - tarball: &File, - registry: &mut Registry, - registry_id: SourceId, - dry_run: bool, -) -> CargoResult<()> { - let deps = pkg - .dependencies() - .iter() - .filter(|dep| { - // Skip dev-dependency without version. - dep.is_transitive() || dep.specified_req() - }) - .map(|dep| { - // If the dependency is from a different registry, then include the - // registry in the dependency. - let dep_registry_id = match dep.registry_id() { - Some(id) => id, - None => SourceId::crates_io(config)?, - }; - // In the index and Web API, None means "from the same registry" - // whereas in Cargo.toml, it means "from crates.io". - let dep_registry = if dep_registry_id != registry_id { - Some(dep_registry_id.url().to_string()) - } else { - None - }; - - Ok(NewCrateDependency { - optional: dep.is_optional(), - default_features: dep.uses_default_features(), - name: dep.package_name().to_string(), - features: dep.features().iter().map(|s| s.to_string()).collect(), - version_req: dep.version_req().to_string(), - target: dep.platform().map(|s| s.to_string()), - kind: match dep.kind() { - DepKind::Normal => "normal", - DepKind::Build => "build", - DepKind::Development => "dev", - } - .to_string(), - registry: dep_registry, - explicit_name_in_toml: dep.explicit_name_in_toml().map(|s| s.to_string()), - }) - }) - .collect::>>()?; - let manifest = pkg.manifest(); - let ManifestMetadata { - ref authors, - ref description, - ref homepage, - ref documentation, - ref keywords, - ref readme, - ref repository, - ref license, - ref license_file, - ref categories, - ref badges, - ref links, - } = *manifest.metadata(); - let readme_content = readme - .as_ref() - .map(|readme| { - paths::read(&pkg.root().join(readme)) - .with_context(|| format!("failed to read `readme` file for package `{}`", pkg)) - }) - .transpose()?; - if let Some(ref file) = *license_file { - if !pkg.root().join(file).exists() { - bail!("the license file `{}` does not exist", file) - } - } - - // Do not upload if performing a dry run - if dry_run { - config.shell().warn("aborting upload due to dry run")?; - return Ok(()); - } - - let string_features = match manifest.original().features() { - Some(features) => features - .iter() - .map(|(feat, values)| { - ( - feat.to_string(), - values.iter().map(|fv| fv.to_string()).collect(), - ) - }) - .collect::>>(), - None => BTreeMap::new(), - }; - - let warnings = registry - .publish( - &NewCrate { - name: pkg.name().to_string(), - vers: pkg.version().to_string(), - deps, - features: string_features, - authors: authors.clone(), - description: description.clone(), - homepage: homepage.clone(), - documentation: documentation.clone(), - keywords: keywords.clone(), - categories: categories.clone(), - readme: readme_content, - readme_file: readme.clone(), - repository: repository.clone(), - license: license.clone(), - license_file: license_file.clone(), - badges: badges.clone(), - links: links.clone(), - v: None, - }, - tarball, - ) - .with_context(|| format!("failed to publish to registry at {}", registry.host()))?; - - if !warnings.invalid_categories.is_empty() { - let msg = format!( - "the following are not valid category slugs and were \ - ignored: {}. Please see https://crates.io/category_slugs \ - for the list of all category slugs. 
\ - ", - warnings.invalid_categories.join(", ") - ); - config.shell().warn(&msg)?; - } - - if !warnings.invalid_badges.is_empty() { - let msg = format!( - "the following are not valid badges and were ignored: {}. \ - Either the badge type specified is unknown or a required \ - attribute is missing. Please see \ - https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata \ - for valid badge types and their required attributes.", - warnings.invalid_badges.join(", ") - ); - config.shell().warn(&msg)?; - } - - if !warnings.other.is_empty() { - for msg in warnings.other { - config.shell().warn(&msg)?; - } - } - - Ok(()) -} - -/// Returns the index and token from the config file for the given registry. -/// -/// `registry` is typically the registry specified on the command-line. If -/// `None`, `index` is set to `None` to indicate it should use crates.io. -pub fn registry_configuration( - config: &Config, - registry: Option<&str>, -) -> CargoResult { - let err_both = |token_key: &str, proc_key: &str| { - Err(format_err!( - "both `{TOKEN_KEY}` and `{PROC_KEY}` \ - were specified in the config\n\ - Only one of these values may be set, remove one or the other to proceed.", - TOKEN_KEY = token_key, - PROC_KEY = proc_key, - )) - }; - // `registry.default` is handled in command-line parsing. - let (index, token, process) = match registry { - Some(registry) => { - validate_package_name(registry, "registry name", "")?; - let index = Some(config.get_registry_index(registry)?.to_string()); - let token_key = format!("registries.{}.token", registry); - let token = config.get_string(&token_key)?.map(|p| p.val); - let process = if config.cli_unstable().credential_process { - let mut proc_key = format!("registries.{}.credential-process", registry); - let mut process = config.get::>(&proc_key)?; - if process.is_none() && token.is_none() { - // This explicitly ignores the global credential-process if - // the token is set, as that is "more specific". - proc_key = String::from("registry.credential-process"); - process = config.get::>(&proc_key)?; - } else if process.is_some() && token.is_some() { - return err_both(&token_key, &proc_key); - } - process - } else { - None - }; - (index, token, process) - } - None => { - // Use crates.io default. - config.check_registry_index_not_set()?; - let token = config.get_string("registry.token")?.map(|p| p.val); - let process = if config.cli_unstable().credential_process { - let process = - config.get::>("registry.credential-process")?; - if token.is_some() && process.is_some() { - return err_both("registry.token", "registry.credential-process"); - } - process - } else { - None - }; - (None, token, process) - } - }; - - let credential_process = - process.map(|process| (process.path.resolve_program(config), process.args)); - - Ok(RegistryConfig { - index, - token, - credential_process, - }) -} - -/// Returns the `Registry` and `Source` based on command-line and config settings. -/// -/// * `token`: The token from the command-line. If not set, uses the token -/// from the config. -/// * `index`: The index URL from the command-line. This is ignored if -/// `registry` is set. -/// * `registry`: The registry name from the command-line. If neither -/// `registry`, or `index` are set, then uses `crates-io`, honoring -/// `[source]` replacement if defined. -/// * `force_update`: If `true`, forces the index to be updated. -/// * `validate_token`: If `true`, the token must be set. 
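The `registry_configuration` logic above resolves which credentials apply to a registry: an explicit token and a `credential-process` are mutually exclusive, and the global `registry.credential-process` is only consulted when neither a token nor a process is set for the specific registry. A minimal standalone sketch of that precedence, with hypothetical names (`resolve_credentials`, plain `String` stand-ins for the config values), not cargo's internal API:

```rust
// Sketch (hypothetical helper): resolve a registry's token vs. credential-process
// with the precedence described in the removed `registry_configuration` above.
fn resolve_credentials(
    token: Option<String>,
    registry_process: Option<String>,
    global_process: Option<String>,
) -> Result<(Option<String>, Option<String>), String> {
    let process = match (&token, registry_process) {
        // A registry-specific credential process together with a token is ambiguous.
        (Some(_), Some(_)) => {
            return Err("both a token and a credential-process were specified; remove one".into())
        }
        // The token is "more specific": do not fall back to the global process.
        (Some(_), None) => None,
        (None, Some(p)) => Some(p),
        // Nothing registry-specific: fall back to the global credential-process.
        (None, None) => global_process,
    };
    Ok((token, process))
}

fn main() {
    // "my-credential-helper" is a made-up helper name for illustration only.
    let global = Some("my-credential-helper".to_string());
    // A configured token shadows the global credential-process.
    assert_eq!(
        resolve_credentials(Some("t0k3n".into()), None, global.clone()),
        Ok((Some("t0k3n".into()), None))
    );
    // With nothing registry-specific, the global credential-process applies.
    assert_eq!(resolve_credentials(None, None, global.clone()), Ok((None, global)));
    // Conflicting registry-level settings are rejected.
    assert!(resolve_credentials(Some("t".into()), Some("p".into()), None).is_err());
}
```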
-fn registry( - config: &Config, - token: Option, - index: Option, - registry: Option, - force_update: bool, - validate_token: bool, -) -> CargoResult<(Registry, RegistryConfig, SourceId)> { - if index.is_some() && registry.is_some() { - // Otherwise we would silently ignore one or the other. - bail!("both `--index` and `--registry` should not be set at the same time"); - } - // Parse all configuration options - let reg_cfg = registry_configuration(config, registry.as_deref())?; - let opt_index = reg_cfg.index.as_ref().or_else(|| index.as_ref()); - let sid = get_source_id(config, opt_index, registry.as_ref())?; - if !sid.is_remote_registry() { - bail!( - "{} does not support API commands.\n\ - Check for a source-replacement in .cargo/config.", - sid - ); - } - let api_host = { - let _lock = config.acquire_package_cache_lock()?; - let mut src = RegistrySource::remote(sid, &HashSet::new(), config); - // Only update the index if the config is not available or `force` is set. - let cfg = src.config(); - let mut updated_cfg = || { - src.update() - .with_context(|| format!("failed to update {}", sid))?; - src.config() - }; - - let cfg = if force_update { - updated_cfg()? - } else { - cfg.or_else(|_| updated_cfg())? - }; - - cfg.and_then(|cfg| cfg.api) - .ok_or_else(|| format_err!("{} does not support API commands", sid))? - }; - let token = if validate_token { - if index.is_some() { - if token.is_none() { - bail!("command-line argument --index requires --token to be specified"); - } - token - } else { - // Check `is_default_registry` so that the crates.io index can - // change config.json's "api" value, and this won't affect most - // people. It will affect those using source replacement, but - // hopefully that's a relatively small set of users. - if token.is_none() - && reg_cfg.token.is_some() - && registry.is_none() - && !sid.is_default_registry() - && !crates_io::is_url_crates_io(&api_host) - { - config.shell().warn( - "using `registry.token` config value with source \ - replacement is deprecated\n\ - This may become a hard error in the future; \ - see .\n\ - Use the --token command-line flag to remove this warning.", - )?; - reg_cfg.token.clone() - } else { - let token = auth::auth_token( - config, - token.as_deref(), - reg_cfg.token.as_deref(), - reg_cfg.credential_process.as_ref(), - registry.as_deref(), - &api_host, - )?; - Some(token) - } - } - } else { - None - }; - let handle = http_handle(config)?; - Ok((Registry::new_handle(api_host, token, handle), reg_cfg, sid)) -} - -/// Creates a new HTTP handle with appropriate global configuration for cargo. -pub fn http_handle(config: &Config) -> CargoResult { - let (mut handle, timeout) = http_handle_and_timeout(config)?; - timeout.configure(&mut handle)?; - Ok(handle) -} - -pub fn http_handle_and_timeout(config: &Config) -> CargoResult<(Easy, HttpTimeout)> { - if config.frozen() { - bail!( - "attempting to make an HTTP request, but --frozen was \ - specified" - ) - } - if !config.network_allowed() { - bail!("can't make HTTP request in the offline mode") - } - - // The timeout option for libcurl by default times out the entire transfer, - // but we probably don't want this. Instead we only set timeouts for the - // connect phase as well as a "low speed" timeout so if we don't receive - // many bytes in a large-ish period of time then we time out. 
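The comment above describes the timeout strategy: rather than bounding the whole transfer, only the connect phase and a minimum transfer rate are enforced. A small sketch using the `curl` crate (the same `Easy` handle type the removed code configures); the helper name `apply_timeouts` is illustrative, while the 30-second / 10-bytes-per-second defaults mirror `HttpTimeout::new` below:

```rust
use std::time::Duration;

use curl::easy::Easy;

// Sketch: bound only the connect phase and a "low speed" window instead of
// capping the total transfer time.
fn apply_timeouts(handle: &mut Easy, dur: Duration, low_speed_limit: u32) -> Result<(), curl::Error> {
    // Fail if the connection cannot be established within `dur`.
    handle.connect_timeout(dur)?;
    // Abort the transfer if fewer than `low_speed_limit` bytes per second
    // arrive for `dur` in a row.
    handle.low_speed_time(dur)?;
    handle.low_speed_limit(low_speed_limit)?;
    Ok(())
}

fn main() -> Result<(), curl::Error> {
    let mut handle = Easy::new();
    apply_timeouts(&mut handle, Duration::from_secs(30), 10)?;
    Ok(())
}
```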
- let mut handle = Easy::new(); - let timeout = configure_http_handle(config, &mut handle)?; - Ok((handle, timeout)) -} - -pub fn needs_custom_http_transport(config: &Config) -> CargoResult { - Ok(http_proxy_exists(config)? - || *config.http_config()? != Default::default() - || env::var_os("HTTP_TIMEOUT").is_some()) -} - -/// Configure a libcurl http handle with the defaults options for Cargo -pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult { - let http = config.http_config()?; - if let Some(proxy) = http_proxy(config)? { - handle.proxy(&proxy)?; - } - if let Some(cainfo) = &http.cainfo { - let cainfo = cainfo.resolve_path(config); - handle.cainfo(&cainfo)?; - } - if let Some(check) = http.check_revoke { - handle.ssl_options(SslOpt::new().no_revoke(!check))?; - } - - if let Some(user_agent) = &http.user_agent { - handle.useragent(user_agent)?; - } else { - handle.useragent(&format!("cargo {}", version()))?; - } - - fn to_ssl_version(s: &str) -> CargoResult { - let version = match s { - "default" => SslVersion::Default, - "tlsv1" => SslVersion::Tlsv1, - "tlsv1.0" => SslVersion::Tlsv10, - "tlsv1.1" => SslVersion::Tlsv11, - "tlsv1.2" => SslVersion::Tlsv12, - "tlsv1.3" => SslVersion::Tlsv13, - _ => bail!( - "Invalid ssl version `{}`,\ - choose from 'default', 'tlsv1', 'tlsv1.0', 'tlsv1.1', 'tlsv1.2', 'tlsv1.3'.", - s - ), - }; - Ok(version) - } - if let Some(ssl_version) = &http.ssl_version { - match ssl_version { - SslVersionConfig::Single(s) => { - let version = to_ssl_version(s.as_str())?; - handle.ssl_version(version)?; - } - SslVersionConfig::Range(SslVersionConfigRange { min, max }) => { - let min_version = min - .as_ref() - .map_or(Ok(SslVersion::Default), |s| to_ssl_version(s))?; - let max_version = max - .as_ref() - .map_or(Ok(SslVersion::Default), |s| to_ssl_version(s))?; - handle.ssl_min_max_version(min_version, max_version)?; - } - } - } - - if let Some(true) = http.debug { - handle.verbose(true)?; - log::debug!("{:#?}", curl::Version::get()); - handle.debug_function(|kind, data| { - let (prefix, level) = match kind { - InfoType::Text => ("*", Level::Debug), - InfoType::HeaderIn => ("<", Level::Debug), - InfoType::HeaderOut => (">", Level::Debug), - InfoType::DataIn => ("{", Level::Trace), - InfoType::DataOut => ("}", Level::Trace), - InfoType::SslDataIn | InfoType::SslDataOut => return, - _ => return, - }; - match str::from_utf8(data) { - Ok(s) => { - for mut line in s.lines() { - if line.starts_with("Authorization:") { - line = "Authorization: [REDACTED]"; - } else if line[..line.len().min(10)].eq_ignore_ascii_case("set-cookie") { - line = "set-cookie: [REDACTED]"; - } - log!(level, "http-debug: {} {}", prefix, line); - } - } - Err(_) => { - log!( - level, - "http-debug: {} ({} bytes of data)", - prefix, - data.len() - ); - } - } - })?; - } - - HttpTimeout::new(config) -} - -#[must_use] -pub struct HttpTimeout { - pub dur: Duration, - pub low_speed_limit: u32, -} - -impl HttpTimeout { - pub fn new(config: &Config) -> CargoResult { - let config = config.http_config()?; - let low_speed_limit = config.low_speed_limit.unwrap_or(10); - let seconds = config - .timeout - .or_else(|| env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok())) - .unwrap_or(30); - Ok(HttpTimeout { - dur: Duration::new(seconds, 0), - low_speed_limit, - }) - } - - pub fn configure(&self, handle: &mut Easy) -> CargoResult<()> { - // The timeout option for libcurl by default times out the entire - // transfer, but we probably don't want this. 
Instead we only set - // timeouts for the connect phase as well as a "low speed" timeout so - // if we don't receive many bytes in a large-ish period of time then we - // time out. - handle.connect_timeout(self.dur)?; - handle.low_speed_time(self.dur)?; - handle.low_speed_limit(self.low_speed_limit)?; - Ok(()) - } -} - -/// Finds an explicit HTTP proxy if one is available. -/// -/// Favor cargo's `http.proxy`, then git's `http.proxy`. Proxies specified -/// via environment variables are picked up by libcurl. -fn http_proxy(config: &Config) -> CargoResult> { - let http = config.http_config()?; - if let Some(s) = &http.proxy { - return Ok(Some(s.clone())); - } - if let Ok(cfg) = git2::Config::open_default() { - if let Ok(s) = cfg.get_string("http.proxy") { - return Ok(Some(s)); - } - } - Ok(None) -} - -/// Determine if an http proxy exists. -/// -/// Checks the following for existence, in order: -/// -/// * cargo's `http.proxy` -/// * git's `http.proxy` -/// * `http_proxy` env var -/// * `HTTP_PROXY` env var -/// * `https_proxy` env var -/// * `HTTPS_PROXY` env var -fn http_proxy_exists(config: &Config) -> CargoResult { - if http_proxy(config)?.is_some() { - Ok(true) - } else { - Ok(["http_proxy", "HTTP_PROXY", "https_proxy", "HTTPS_PROXY"] - .iter() - .any(|v| env::var(v).is_ok())) - } -} - -pub fn registry_login( - config: &Config, - token: Option, - reg: Option, -) -> CargoResult<()> { - let (registry, reg_cfg, _) = registry(config, token.clone(), None, reg.clone(), false, false)?; - - let token = match token { - Some(token) => token, - None => { - drop_println!( - config, - "please paste the API Token found on {}/me below", - registry.host() - ); - let mut line = String::new(); - let input = io::stdin(); - input - .lock() - .read_line(&mut line) - .with_context(|| "failed to read stdin")?; - // Automatically remove `cargo login` from an inputted token to - // allow direct pastes from `registry.host()`/me. 
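The cleanup hinted at in the comment above is a plain string operation: drop a leading `cargo login` if the user pasted the whole command from the registry's `/me` page, then trim surrounding whitespace. A self-contained sketch (the helper name `sanitize_pasted_token` is hypothetical):

```rust
// Sketch of the paste cleanup described above: keep only the token.
fn sanitize_pasted_token(line: &str) -> String {
    line.replace("cargo login", "").trim().to_string()
}

fn main() {
    assert_eq!(sanitize_pasted_token("  cargo login abcdef123456  \n"), "abcdef123456");
    assert_eq!(sanitize_pasted_token("abcdef123456"), "abcdef123456");
    println!("token cleanup behaves as expected");
}
```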
- line.replace("cargo login", "").trim().to_string() - } - }; - - if let Some(old_token) = ®_cfg.token { - if old_token == &token { - config.shell().status("Login", "already logged in")?; - return Ok(()); - } - } - - auth::login( - config, - token, - reg_cfg.credential_process.as_ref(), - reg.as_deref(), - registry.host(), - )?; - - config.shell().status( - "Login", - format!( - "token for `{}` saved", - reg.as_ref().map_or(CRATES_IO_DOMAIN, String::as_str) - ), - )?; - Ok(()) -} - -pub fn registry_logout(config: &Config, reg: Option) -> CargoResult<()> { - let (registry, reg_cfg, _) = registry(config, None, None, reg.clone(), false, false)?; - let reg_name = reg.as_deref().unwrap_or(CRATES_IO_DOMAIN); - if reg_cfg.credential_process.is_none() && reg_cfg.token.is_none() { - config.shell().status( - "Logout", - format!("not currently logged in to `{}`", reg_name), - )?; - return Ok(()); - } - auth::logout( - config, - reg_cfg.credential_process.as_ref(), - reg.as_deref(), - registry.host(), - )?; - config.shell().status( - "Logout", - format!( - "token for `{}` has been removed from local storage", - reg_name - ), - )?; - Ok(()) -} - -pub struct OwnersOptions { - pub krate: Option, - pub token: Option, - pub index: Option, - pub to_add: Option>, - pub to_remove: Option>, - pub list: bool, - pub registry: Option, -} - -pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { - let name = match opts.krate { - Some(ref name) => name.clone(), - None => { - let manifest_path = find_root_manifest_for_wd(config.cwd())?; - let ws = Workspace::new(&manifest_path, config)?; - ws.current()?.package_id().name().to_string() - } - }; - - let (mut registry, _, _) = registry( - config, - opts.token.clone(), - opts.index.clone(), - opts.registry.clone(), - true, - true, - )?; - - if let Some(ref v) = opts.to_add { - let v = v.iter().map(|s| &s[..]).collect::>(); - let msg = registry.add_owners(&name, &v).with_context(|| { - format!( - "failed to invite owners to crate `{}` on registry at {}", - name, - registry.host() - ) - })?; - - config.shell().status("Owner", msg)?; - } - - if let Some(ref v) = opts.to_remove { - let v = v.iter().map(|s| &s[..]).collect::>(); - config - .shell() - .status("Owner", format!("removing {:?} from crate {}", v, name))?; - registry.remove_owners(&name, &v).with_context(|| { - format!( - "failed to remove owners from crate `{}` on registry at {}", - name, - registry.host() - ) - })?; - } - - if opts.list { - let owners = registry.list_owners(&name).with_context(|| { - format!( - "failed to list owners of crate `{}` on registry at {}", - name, - registry.host() - ) - })?; - for owner in owners.iter() { - drop_print!(config, "{}", owner.login); - match (owner.name.as_ref(), owner.email.as_ref()) { - (Some(name), Some(email)) => drop_println!(config, " ({} <{}>)", name, email), - (Some(s), None) | (None, Some(s)) => drop_println!(config, " ({})", s), - (None, None) => drop_println!(config), - } - } - } - - Ok(()) -} - -pub fn yank( - config: &Config, - krate: Option, - version: Option, - token: Option, - index: Option, - undo: bool, - reg: Option, -) -> CargoResult<()> { - let name = match krate { - Some(name) => name, - None => { - let manifest_path = find_root_manifest_for_wd(config.cwd())?; - let ws = Workspace::new(&manifest_path, config)?; - ws.current()?.package_id().name().to_string() - } - }; - let version = match version { - Some(v) => v, - None => bail!("a version must be specified to yank"), - }; - - let (mut registry, _, _) = registry(config, 
token, index, reg, true, true)?; - - if undo { - config - .shell() - .status("Unyank", format!("{}:{}", name, version))?; - registry.unyank(&name, &version).with_context(|| { - format!( - "failed to undo a yank from the registry at {}", - registry.host() - ) - })?; - } else { - config - .shell() - .status("Yank", format!("{}:{}", name, version))?; - registry - .yank(&name, &version) - .with_context(|| format!("failed to yank from the registry at {}", registry.host()))?; - } - - Ok(()) -} - -/// Gets the SourceId for an index or registry setting. -/// -/// The `index` and `reg` values are from the command-line or config settings. -/// If both are None, returns the source for crates.io. -fn get_source_id( - config: &Config, - index: Option<&String>, - reg: Option<&String>, -) -> CargoResult { - match (reg, index) { - (Some(r), _) => SourceId::alt_registry(config, r), - (_, Some(i)) => SourceId::for_registry(&i.into_url()?), - _ => { - let map = SourceConfigMap::new(config)?; - let src = map.load(SourceId::crates_io(config)?, &HashSet::new())?; - Ok(src.replaced_source_id()) - } - } -} - -pub fn search( - query: &str, - config: &Config, - index: Option, - limit: u32, - reg: Option, -) -> CargoResult<()> { - fn truncate_with_ellipsis(s: &str, max_width: usize) -> String { - // We should truncate at grapheme-boundary and compute character-widths, - // yet the dependencies on unicode-segmentation and unicode-width are - // not worth it. - let mut chars = s.chars(); - let mut prefix = (&mut chars).take(max_width - 1).collect::(); - if chars.next().is_some() { - prefix.push('โ€ฆ'); - } - prefix - } - - let (mut registry, _, source_id) = registry(config, None, index, reg, false, false)?; - let (crates, total_crates) = registry.search(query, limit).with_context(|| { - format!( - "failed to retrieve search results from the registry at {}", - registry.host() - ) - })?; - - let names = crates - .iter() - .map(|krate| format!("{} = \"{}\"", krate.name, krate.max_version)) - .collect::>(); - - let description_margin = names.iter().map(|s| s.len() + 4).max().unwrap_or_default(); - - let description_length = cmp::max(80, 128 - description_margin); - - let descriptions = crates.iter().map(|krate| { - krate - .description - .as_ref() - .map(|desc| truncate_with_ellipsis(&desc.replace("\n", " "), description_length)) - }); - - for (name, description) in names.into_iter().zip(descriptions) { - let line = match description { - Some(desc) => { - let space = repeat(' ') - .take(description_margin - name.len()) - .collect::(); - name + &space + "# " + &desc - } - None => name, - }; - drop_println!(config, "{}", line); - } - - let search_max_limit = 100; - if total_crates > limit && limit < search_max_limit { - drop_println!( - config, - "... and {} crates more (use --limit N to see more)", - total_crates - limit - ); - } else if total_crates > limit && limit >= search_max_limit { - let extra = if source_id.is_default_registry() { - format!( - " (go to https://crates.io/search?q={} to see more)", - percent_encode(query.as_bytes(), NON_ALPHANUMERIC) - ) - } else { - String::new() - }; - drop_println!( - config, - "... 
and {} crates more{}", - total_crates - limit, - extra - ); - } - - Ok(()) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/registry/auth.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/registry/auth.rs deleted file mode 100644 index d10b1988e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/registry/auth.rs +++ /dev/null @@ -1,236 +0,0 @@ -//! Registry authentication support. - -use crate::sources::CRATES_IO_REGISTRY; -use crate::util::{config, CargoResult, Config}; -use anyhow::{bail, format_err, Context as _}; -use cargo_util::ProcessError; -use std::io::{Read, Write}; -use std::path::PathBuf; -use std::process::{Command, Stdio}; - -enum Action { - Get, - Store(String), - Erase, -} - -/// Returns the token to use for the given registry. -pub(super) fn auth_token( - config: &Config, - cli_token: Option<&str>, - config_token: Option<&str>, - credential_process: Option<&(PathBuf, Vec)>, - registry_name: Option<&str>, - api_url: &str, -) -> CargoResult { - let token = match (cli_token, config_token, credential_process) { - (None, None, None) => { - bail!("no upload token found, please run `cargo login` or pass `--token`"); - } - (Some(cli_token), _, _) => cli_token.to_string(), - (None, Some(config_token), _) => config_token.to_string(), - (None, None, Some(process)) => { - let registry_name = registry_name.unwrap_or(CRATES_IO_REGISTRY); - run_command(config, process, registry_name, api_url, Action::Get)?.unwrap() - } - }; - Ok(token) -} - -/// Saves the given token. -pub(super) fn login( - config: &Config, - token: String, - credential_process: Option<&(PathBuf, Vec)>, - registry_name: Option<&str>, - api_url: &str, -) -> CargoResult<()> { - if let Some(process) = credential_process { - let registry_name = registry_name.unwrap_or(CRATES_IO_REGISTRY); - run_command( - config, - process, - registry_name, - api_url, - Action::Store(token), - )?; - } else { - config::save_credentials(config, Some(token), registry_name)?; - } - Ok(()) -} - -/// Removes the token for the given registry. 
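`auth_token` above encodes a fixed lookup order: a token from the command line wins, then a token from the config file, and only then is the configured credential process consulted; with none of the three available the lookup fails. A standalone sketch of that precedence, with hypothetical names and a pre-fetched string standing in for actually running the credential process:

```rust
// Sketch (not cargo's API): token lookup precedence.
fn pick_token(
    cli_token: Option<&str>,
    config_token: Option<&str>,
    credential_process_token: Option<&str>, // stands in for running the external helper
) -> Result<String, String> {
    match (cli_token, config_token, credential_process_token) {
        (None, None, None) => {
            Err("no upload token found, please run `cargo login` or pass `--token`".into())
        }
        // Command-line token shadows everything else.
        (Some(cli), _, _) => Ok(cli.to_string()),
        // Config token is used before the credential process is run.
        (None, Some(cfg), _) => Ok(cfg.to_string()),
        (None, None, Some(proc_token)) => Ok(proc_token.to_string()),
    }
}

fn main() {
    assert_eq!(pick_token(Some("cli"), Some("cfg"), Some("proc")).unwrap(), "cli");
    assert_eq!(pick_token(None, Some("cfg"), Some("proc")).unwrap(), "cfg");
    assert_eq!(pick_token(None, None, Some("proc")).unwrap(), "proc");
    assert!(pick_token(None, None, None).is_err());
}
```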
-pub(super) fn logout( - config: &Config, - credential_process: Option<&(PathBuf, Vec)>, - registry_name: Option<&str>, - api_url: &str, -) -> CargoResult<()> { - if let Some(process) = credential_process { - let registry_name = registry_name.unwrap_or(CRATES_IO_REGISTRY); - run_command(config, process, registry_name, api_url, Action::Erase)?; - } else { - config::save_credentials(config, None, registry_name)?; - } - Ok(()) -} - -fn run_command( - config: &Config, - process: &(PathBuf, Vec), - name: &str, - api_url: &str, - action: Action, -) -> CargoResult> { - let cred_proc; - let (exe, args) = if process.0.to_str().unwrap_or("").starts_with("cargo:") { - cred_proc = sysroot_credential(config, process)?; - &cred_proc - } else { - process - }; - if !args.iter().any(|arg| arg.contains("{action}")) { - let msg = |which| { - format!( - "credential process `{}` cannot be used to {}, \ - the credential-process configuration value must pass the \ - `{{action}}` argument in the config to support this command", - exe.display(), - which - ) - }; - match action { - Action::Get => {} - Action::Store(_) => bail!(msg("log in")), - Action::Erase => bail!(msg("log out")), - } - } - let action_str = match action { - Action::Get => "get", - Action::Store(_) => "store", - Action::Erase => "erase", - }; - let args: Vec<_> = args - .iter() - .map(|arg| { - arg.replace("{action}", action_str) - .replace("{name}", name) - .replace("{api_url}", api_url) - }) - .collect(); - - let mut cmd = Command::new(&exe); - cmd.args(args) - .env("CARGO", config.cargo_exe()?) - .env("CARGO_REGISTRY_NAME", name) - .env("CARGO_REGISTRY_API_URL", api_url); - match action { - Action::Get => { - cmd.stdout(Stdio::piped()); - } - Action::Store(_) => { - cmd.stdin(Stdio::piped()); - } - Action::Erase => {} - } - let mut child = cmd.spawn().with_context(|| { - let verb = match action { - Action::Get => "fetch", - Action::Store(_) => "store", - Action::Erase => "erase", - }; - format!( - "failed to execute `{}` to {} authentication token for registry `{}`", - exe.display(), - verb, - name - ) - })?; - let mut token = None; - match &action { - Action::Get => { - let mut buffer = String::new(); - log::debug!("reading into buffer"); - child - .stdout - .as_mut() - .unwrap() - .read_to_string(&mut buffer) - .with_context(|| { - format!( - "failed to read token from registry credential process `{}`", - exe.display() - ) - })?; - if let Some(end) = buffer.find('\n') { - if buffer.len() > end + 1 { - bail!( - "credential process `{}` returned more than one line of output; \ - expected a single token", - exe.display() - ); - } - buffer.truncate(end); - } - token = Some(buffer); - } - Action::Store(token) => { - writeln!(child.stdin.as_ref().unwrap(), "{}", token).with_context(|| { - format!( - "failed to send token to registry credential process `{}`", - exe.display() - ) - })?; - } - Action::Erase => {} - } - let status = child.wait().with_context(|| { - format!( - "registry credential process `{}` exit failure", - exe.display() - ) - })?; - if !status.success() { - let msg = match action { - Action::Get => "failed to authenticate to registry", - Action::Store(_) => "failed to store token to registry", - Action::Erase => "failed to erase token from registry", - }; - return Err(ProcessError::new( - &format!( - "registry credential process `{}` {} `{}`", - exe.display(), - msg, - name - ), - Some(status), - None, - ) - .into()); - } - Ok(token) -} - -/// Gets the path to the libexec processes in the sysroot. 
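`run_command` above expands `{action}`, `{name}` and `{api_url}` placeholders in the configured credential-process arguments before spawning the helper, and refuses to store or erase a token unless an `{action}` placeholder is passed through. A minimal sketch of that expansion (the `expand_args` helper is hypothetical; the substitution itself mirrors the removed code):

```rust
// Sketch: expand the placeholders a credential-process argument may contain.
fn expand_args(args: &[String], action: &str, name: &str, api_url: &str) -> Vec<String> {
    args.iter()
        .map(|arg| {
            arg.replace("{action}", action)
                .replace("{name}", name)
                .replace("{api_url}", api_url)
        })
        .collect()
}

fn main() {
    let configured = vec!["--registry={name}".to_string(), "{action}".to_string()];
    let expanded = expand_args(&configured, "store", "my-registry", "https://registry.example.com");
    assert_eq!(expanded, vec!["--registry=my-registry", "store"]);

    // As in the removed code, store/erase only work when the configuration
    // actually forwards an `{action}` placeholder to the helper.
    assert!(configured.iter().any(|arg| arg.contains("{action}")));
}
```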
-fn sysroot_credential( - config: &Config, - process: &(PathBuf, Vec), -) -> CargoResult<(PathBuf, Vec)> { - let cred_name = process.0.to_str().unwrap().strip_prefix("cargo:").unwrap(); - let cargo = config.cargo_exe()?; - let root = cargo - .parent() - .and_then(|p| p.parent()) - .ok_or_else(|| format_err!("expected cargo path {}", cargo.display()))?; - let exe = root.join("libexec").join(format!( - "cargo-credential-{}{}", - cred_name, - std::env::consts::EXE_SUFFIX - )); - let mut args = process.1.clone(); - if !args.iter().any(|arg| arg == "{action}") { - args.push("{action}".to_string()); - } - Ok((exe, args)) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/resolve.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/resolve.rs deleted file mode 100644 index 599518362..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/resolve.rs +++ /dev/null @@ -1,827 +0,0 @@ -//! High-level APIs for executing the resolver. -//! -//! This module provides functions for running the resolver given a workspace. -//! There are roughly 3 main functions: -//! -//! - `resolve_ws`: A simple, high-level function with no options. -//! - `resolve_ws_with_opts`: A medium-level function with options like -//! user-provided features. This is the most appropriate function to use in -//! most cases. -//! - `resolve_with_previous`: A low-level function for running the resolver, -//! providing the most power and flexibility. - -use crate::core::compiler::{CompileKind, RustcTargetData}; -use crate::core::registry::{LockedPatchDependency, PackageRegistry}; -use crate::core::resolver::features::{ - CliFeatures, FeatureOpts, FeatureResolver, ForceAllTargets, RequestedFeatures, ResolvedFeatures, -}; -use crate::core::resolver::{ - self, HasDevUnits, Resolve, ResolveOpts, ResolveVersion, VersionPreferences, -}; -use crate::core::summary::Summary; -use crate::core::Feature; -use crate::core::{ - GitReference, PackageId, PackageIdSpec, PackageSet, Source, SourceId, Workspace, -}; -use crate::ops; -use crate::sources::PathSource; -use crate::util::errors::CargoResult; -use crate::util::{profile, CanonicalUrl}; -use anyhow::Context as _; -use log::{debug, trace}; -use std::collections::{HashMap, HashSet}; - -/// Result for `resolve_ws_with_opts`. -pub struct WorkspaceResolve<'cfg> { - /// Packages to be downloaded. - pub pkg_set: PackageSet<'cfg>, - /// The resolve for the entire workspace. - /// - /// This may be `None` for things like `cargo install` and `-Zavoid-dev-deps`. - /// This does not include `paths` overrides. - pub workspace_resolve: Option, - /// The narrowed resolve, with the specific features enabled, and only the - /// given package specs requested. - pub targeted_resolve: Resolve, - /// The features activated per package. - pub resolved_features: ResolvedFeatures, -} - -const UNUSED_PATCH_WARNING: &str = "\ -Check that the patched package version and available features are compatible -with the dependency requirements. If the patch has a different version from -what is locked in the Cargo.lock file, run `cargo update` to use the new -version. This may also occur with an optional dependency that is not enabled."; - -/// Resolves all dependencies for the workspace using the previous -/// lock file as a guide if present. -/// -/// This function will also write the result of resolution as a new lock file -/// (unless it is an ephemeral workspace such as `cargo install` or `cargo -/// package`). 
-/// -/// This is a simple interface used by commands like `clean`, `fetch`, and -/// `package`, which don't specify any options or features. -pub fn resolve_ws<'a>(ws: &Workspace<'a>) -> CargoResult<(PackageSet<'a>, Resolve)> { - let mut registry = PackageRegistry::new(ws.config())?; - let resolve = resolve_with_registry(ws, &mut registry)?; - let packages = get_resolved_packages(&resolve, registry)?; - Ok((packages, resolve)) -} - -/// Resolves dependencies for some packages of the workspace, -/// taking into account `paths` overrides and activated features. -/// -/// This function will also write the result of resolution as a new lock file -/// (unless `Workspace::require_optional_deps` is false, such as `cargo -/// install` or `-Z avoid-dev-deps`), or it is an ephemeral workspace (`cargo -/// install` or `cargo package`). -/// -/// `specs` may be empty, which indicates it should resolve all workspace -/// members. In this case, `opts.all_features` must be `true`. -pub fn resolve_ws_with_opts<'cfg>( - ws: &Workspace<'cfg>, - target_data: &RustcTargetData<'cfg>, - requested_targets: &[CompileKind], - cli_features: &CliFeatures, - specs: &[PackageIdSpec], - has_dev_units: HasDevUnits, - force_all_targets: ForceAllTargets, -) -> CargoResult> { - let mut registry = PackageRegistry::new(ws.config())?; - let mut add_patches = true; - let resolve = if ws.ignore_lock() { - None - } else if ws.require_optional_deps() { - // First, resolve the root_package's *listed* dependencies, as well as - // downloading and updating all remotes and such. - let resolve = resolve_with_registry(ws, &mut registry)?; - // No need to add patches again, `resolve_with_registry` has done it. - add_patches = false; - - // Second, resolve with precisely what we're doing. Filter out - // transitive dependencies if necessary, specify features, handle - // overrides, etc. - let _p = profile::start("resolving with overrides..."); - - add_overrides(&mut registry, ws)?; - - for &(ref replace_spec, ref dep) in ws.root_replace() { - if !resolve - .iter() - .any(|r| replace_spec.matches(r) && !dep.matches_id(r)) - { - ws.config() - .shell() - .warn(format!("package replacement is not used: {}", replace_spec))? - } - - if dep.features().len() != 0 || !dep.uses_default_features() { - ws.config() - .shell() - .warn(format!( - "replacement for `{}` uses the features mechanism. \ - default-features and features will not take effect because the replacement dependency does not support this mechanism", - dep.package_name() - ))? - } - } - - Some(resolve) - } else { - ops::load_pkg_lockfile(ws)? - }; - - let resolved_with_overrides = resolve_with_previous( - &mut registry, - ws, - cli_features, - has_dev_units, - resolve.as_ref(), - None, - specs, - add_patches, - )?; - - let pkg_set = get_resolved_packages(&resolved_with_overrides, registry)?; - - let member_ids = ws - .members_with_features(specs, cli_features)? 
- .into_iter() - .map(|(p, _fts)| p.package_id()) - .collect::>(); - pkg_set.download_accessible( - &resolved_with_overrides, - &member_ids, - has_dev_units, - requested_targets, - target_data, - force_all_targets, - )?; - - let feature_opts = FeatureOpts::new(ws, has_dev_units, force_all_targets)?; - let resolved_features = FeatureResolver::resolve( - ws, - target_data, - &resolved_with_overrides, - &pkg_set, - cli_features, - specs, - requested_targets, - feature_opts, - )?; - - let no_lib_pkgs = pkg_set.no_lib_pkgs( - &resolved_with_overrides, - &member_ids, - has_dev_units, - requested_targets, - target_data, - force_all_targets, - ); - for (pkg_id, dep_pkgs) in no_lib_pkgs { - for dep_pkg in dep_pkgs { - ws.config().shell().warn(&format!( - "{} ignoring invalid dependency `{}` which is missing a lib target", - pkg_id, - dep_pkg.name(), - ))?; - } - } - - Ok(WorkspaceResolve { - pkg_set, - workspace_resolve: resolve, - targeted_resolve: resolved_with_overrides, - resolved_features, - }) -} - -fn resolve_with_registry<'cfg>( - ws: &Workspace<'cfg>, - registry: &mut PackageRegistry<'cfg>, -) -> CargoResult { - let prev = ops::load_pkg_lockfile(ws)?; - let mut resolve = resolve_with_previous( - registry, - ws, - &CliFeatures::new_all(true), - HasDevUnits::Yes, - prev.as_ref(), - None, - &[], - true, - )?; - - if !ws.is_ephemeral() && ws.require_optional_deps() { - ops::write_pkg_lockfile(ws, &mut resolve)?; - } - Ok(resolve) -} - -/// Resolves all dependencies for a package using an optional previous instance. -/// of resolve to guide the resolution process. -/// -/// This also takes an optional hash set, `to_avoid`, which is a list of package -/// IDs that should be avoided when consulting the previous instance of resolve -/// (often used in pairings with updates). -/// -/// The previous resolve normally comes from a lock file. This function does not -/// read or write lock files from the filesystem. -/// -/// `specs` may be empty, which indicates it should resolve all workspace -/// members. In this case, `opts.all_features` must be `true`. -/// -/// If `register_patches` is true, then entries from the `[patch]` table in -/// the manifest will be added to the given `PackageRegistry`. -pub fn resolve_with_previous<'cfg>( - registry: &mut PackageRegistry<'cfg>, - ws: &Workspace<'cfg>, - cli_features: &CliFeatures, - has_dev_units: HasDevUnits, - previous: Option<&Resolve>, - to_avoid: Option<&HashSet>, - specs: &[PackageIdSpec], - register_patches: bool, -) -> CargoResult { - // We only want one Cargo at a time resolving a crate graph since this can - // involve a lot of frobbing of the global caches. - let _lock = ws.config().acquire_package_cache_lock()?; - - // Here we place an artificial limitation that all non-registry sources - // cannot be locked at more than one revision. This means that if a Git - // repository provides more than one package, they must all be updated in - // step when any of them are updated. - // - // TODO: this seems like a hokey reason to single out the registry as being - // different. - let to_avoid_sources: HashSet = to_avoid - .map(|set| { - set.iter() - .map(|p| p.source_id()) - .filter(|s| !s.is_registry()) - .collect() - }) - .unwrap_or_default(); - - let pre_patch_keep = |p: &PackageId| { - !to_avoid_sources.contains(&p.source_id()) - && match to_avoid { - Some(set) => !set.contains(p), - None => true, - } - }; - - // While registering patches, we will record preferences for particular versions - // of various packages. 
- let mut version_prefs = VersionPreferences::default(); - - // This is a set of PackageIds of `[patch]` entries, and some related locked PackageIds, for - // which locking should be avoided (but which will be preferred when searching dependencies, - // via prefer_patch_deps below) - let mut avoid_patch_ids = HashSet::new(); - - if register_patches { - for (url, patches) in ws.root_patch()?.iter() { - for patch in patches { - version_prefs.prefer_dependency(patch.clone()); - } - let previous = match previous { - Some(r) => r, - None => { - let patches: Vec<_> = patches.iter().map(|p| (p, None)).collect(); - let unlock_ids = registry.patch(url, &patches)?; - // Since nothing is locked, this shouldn't possibly return anything. - assert!(unlock_ids.is_empty()); - continue; - } - }; - - // This is a list of pairs where the first element of the pair is - // the raw `Dependency` which matches what's listed in `Cargo.toml`. - // The second element is, if present, the "locked" version of - // the `Dependency` as well as the `PackageId` that it previously - // resolved to. This second element is calculated by looking at the - // previous resolve graph, which is primarily what's done here to - // build the `registrations` list. - let mut registrations = Vec::new(); - for dep in patches { - let candidates = || { - previous - .iter() - .chain(previous.unused_patches().iter().cloned()) - .filter(&pre_patch_keep) - }; - - let lock = match candidates().find(|id| dep.matches_id(*id)) { - // If we found an exactly matching candidate in our list of - // candidates, then that's the one to use. - Some(package_id) => { - let mut locked_dep = dep.clone(); - locked_dep.lock_to(package_id); - Some(LockedPatchDependency { - dependency: locked_dep, - package_id, - alt_package_id: None, - }) - } - None => { - // If the candidate does not have a matching source id - // then we may still have a lock candidate. If we're - // loading a v2-encoded resolve graph and `dep` is a - // git dep with `branch = 'master'`, then this should - // also match candidates without `branch = 'master'` - // (which is now treated separately in Cargo). - // - // In this scenario we try to convert candidates located - // in the resolve graph to explicitly having the - // `master` branch (if they otherwise point to - // `DefaultBranch`). If this works and our `dep` - // matches that then this is something we'll lock to. - match candidates().find(|&id| { - match master_branch_git_source(id, previous) { - Some(id) => dep.matches_id(id), - None => false, - } - }) { - Some(id_using_default) => { - let id_using_master = id_using_default.with_source_id( - dep.source_id().with_precise( - id_using_default - .source_id() - .precise() - .map(|s| s.to_string()), - ), - ); - - let mut locked_dep = dep.clone(); - locked_dep.lock_to(id_using_master); - Some(LockedPatchDependency { - dependency: locked_dep, - package_id: id_using_master, - // Note that this is where the magic - // happens, where the resolve graph - // probably has locks pointing to - // DefaultBranch sources, and by including - // this here those will get transparently - // rewritten to Branch("master") which we - // have a lock entry for. - alt_package_id: Some(id_using_default), - }) - } - - // No locked candidate was found - None => None, - } - } - }; - - registrations.push((dep, lock)); - } - - let canonical = CanonicalUrl::new(url)?; - for (orig_patch, unlock_id) in registry.patch(url, ®istrations)? { - // Avoid the locked patch ID. 
- avoid_patch_ids.insert(unlock_id); - // Also avoid the thing it is patching. - avoid_patch_ids.extend(previous.iter().filter(|id| { - orig_patch.matches_ignoring_source(*id) - && *id.source_id().canonical_url() == canonical - })); - } - } - } - debug!("avoid_patch_ids={:?}", avoid_patch_ids); - - let keep = |p: &PackageId| pre_patch_keep(p) && !avoid_patch_ids.contains(p); - - let dev_deps = ws.require_optional_deps() || has_dev_units == HasDevUnits::Yes; - // In the case where a previous instance of resolve is available, we - // want to lock as many packages as possible to the previous version - // without disturbing the graph structure. - if let Some(r) = previous { - trace!("previous: {:?}", r); - register_previous_locks(ws, registry, r, &keep, dev_deps); - } - - // Prefer to use anything in the previous lock file, aka we want to have conservative updates. - for r in previous { - for id in r.iter() { - if keep(&id) { - debug!("attempting to prefer {}", id); - version_prefs.prefer_package_id(id); - } - } - } - - if register_patches { - registry.lock_patches(); - } - - for member in ws.members() { - registry.add_sources(Some(member.package_id().source_id()))?; - } - - let summaries: Vec<(Summary, ResolveOpts)> = ws - .members_with_features(specs, cli_features)? - .into_iter() - .map(|(member, features)| { - let summary = registry.lock(member.summary().clone()); - ( - summary, - ResolveOpts { - dev_deps, - features: RequestedFeatures::CliFeatures(features), - }, - ) - }) - .collect(); - - let root_replace = ws.root_replace(); - - let replace = match previous { - Some(r) => root_replace - .iter() - .map(|&(ref spec, ref dep)| { - for (&key, &val) in r.replacements().iter() { - if spec.matches(key) && dep.matches_id(val) && keep(&val) { - let mut dep = dep.clone(); - dep.lock_to(val); - return (spec.clone(), dep); - } - } - (spec.clone(), dep.clone()) - }) - .collect::>(), - None => root_replace.to_vec(), - }; - - ws.preload(registry); - let mut resolved = resolver::resolve( - &summaries, - &replace, - registry, - &version_prefs, - Some(ws.config()), - ws.unstable_features() - .require(Feature::public_dependency()) - .is_ok(), - )?; - let patches: Vec<_> = registry - .patches() - .values() - .flat_map(|v| v.iter().cloned()) - .collect(); - resolved.register_used_patches(&patches[..]); - - if register_patches && !resolved.unused_patches().is_empty() { - emit_warnings_of_unused_patches(ws, &resolved, registry)?; - } - - if let Some(previous) = previous { - resolved.merge_from(previous)?; - } - Ok(resolved) -} - -/// Read the `paths` configuration variable to discover all path overrides that -/// have been configured. -pub fn add_overrides<'a>( - registry: &mut PackageRegistry<'a>, - ws: &Workspace<'a>, -) -> CargoResult<()> { - let config = ws.config(); - let paths = match config.get_list("paths")? { - Some(list) => list, - None => return Ok(()), - }; - - let paths = paths.val.iter().map(|(s, def)| { - // The path listed next to the string is the config file in which the - // key was located, so we want to pop off the `.cargo/config` component - // to get the directory containing the `.cargo` folder. 
- (def.root(config).join(s), def) - }); - - for (path, definition) in paths { - let id = SourceId::for_path(&path)?; - let mut source = PathSource::new_recursive(&path, id, ws.config()); - source.update().with_context(|| { - format!( - "failed to update path override `{}` \ - (defined in `{}`)", - path.display(), - definition - ) - })?; - registry.add_override(Box::new(source)); - } - Ok(()) -} - -pub fn get_resolved_packages<'cfg>( - resolve: &Resolve, - registry: PackageRegistry<'cfg>, -) -> CargoResult> { - let ids: Vec = resolve.iter().collect(); - registry.get(&ids) -} - -/// In this function we're responsible for informing the `registry` of all -/// locked dependencies from the previous lock file we had, `resolve`. -/// -/// This gets particularly tricky for a couple of reasons. The first is that we -/// want all updates to be conservative, so we actually want to take the -/// `resolve` into account (and avoid unnecessary registry updates and such). -/// the second, however, is that we want to be resilient to updates of -/// manifests. For example if a dependency is added or a version is changed we -/// want to make sure that we properly re-resolve (conservatively) instead of -/// providing an opaque error. -/// -/// The logic here is somewhat subtle, but there should be more comments below to -/// clarify things. -/// -/// Note that this function, at the time of this writing, is basically the -/// entire fix for issue #4127. -fn register_previous_locks( - ws: &Workspace<'_>, - registry: &mut PackageRegistry<'_>, - resolve: &Resolve, - keep: &dyn Fn(&PackageId) -> bool, - dev_deps: bool, -) { - let path_pkg = |id: SourceId| { - if !id.is_path() { - return None; - } - if let Ok(path) = id.url().to_file_path() { - if let Ok(pkg) = ws.load(&path.join("Cargo.toml")) { - return Some(pkg); - } - } - None - }; - - // Ok so we've been passed in a `keep` function which basically says "if I - // return `true` then this package wasn't listed for an update on the command - // line". That is, if we run `cargo update -p foo` then `keep(bar)` will return - // `true`, whereas `keep(foo)` will return `false` (roughly speaking). - // - // This isn't actually quite what we want, however. Instead we want to - // further refine this `keep` function with *all transitive dependencies* of - // the packages we're not keeping. For example, consider a case like this: - // - // * There's a crate `log`. - // * There's a crate `serde` which depends on `log`. - // - // Let's say we then run `cargo update -p serde`. This may *also* want to - // update the `log` dependency as our newer version of `serde` may have a - // new minimum version required for `log`. Now this isn't always guaranteed - // to work. What'll happen here is we *won't* lock the `log` dependency nor - // the `log` crate itself, but we will inform the registry "please prefer - // this version of `log`". That way if our newer version of serde works with - // the older version of `log`, we conservatively won't update `log`. If, - // however, nothing else in the dependency graph depends on `log` and the - // newer version of `serde` requires a new version of `log` it'll get pulled - // in (as we didn't accidentally lock it to an old version). - // - // Additionally, here we process all path dependencies listed in the previous - // resolve. They can not only have their dependencies change but also - // the versions of the package change as well. 
If this ends up happening - // then we want to make sure we don't lock a package ID node that doesn't - // actually exist. Note that we don't do transitive visits of all the - // package's dependencies here as that'll be covered below to poison those - // if they changed. - let mut avoid_locking = HashSet::new(); - registry.add_to_yanked_whitelist(resolve.iter().filter(keep)); - for node in resolve.iter() { - if !keep(&node) { - add_deps(resolve, node, &mut avoid_locking); - } else if let Some(pkg) = path_pkg(node.source_id()) { - if pkg.package_id() != node { - avoid_locking.insert(node); - } - } - } - - // Ok, but the above loop isn't the entire story! Updates to the dependency - // graph can come from two locations, the `cargo update` command or - // manifests themselves. For example a manifest on the filesystem may - // have been updated to have an updated version requirement on `serde`. In - // this case both `keep(serde)` and `keep(log)` return `true` (the `keep` - // that's an argument to this function). We, however, don't want to keep - // either of those! Otherwise we'll get obscure resolve errors about locked - // versions. - // - // To solve this problem we iterate over all packages with path sources - // (aka ones with manifests that are changing) and take a look at all of - // their dependencies. If any dependency does not match something in the - // previous lock file, then we're guaranteed that the main resolver will - // update the source of this dependency no matter what. Knowing this we - // poison all packages from the same source, forcing them all to get - // updated. - // - // This may seem like a heavy hammer, and it is! It means that if you change - // anything from crates.io then all of crates.io becomes unlocked. Note, - // however, that we still want conservative updates. This currently happens - // because the first candidate the resolver picks is the previously locked - // version, and only if that fails to activate to we move on and try - // a different version. (giving the guise of conservative updates) - // - // For example let's say we had `serde = "0.1"` written in our lock file. - // When we later edit this to `serde = "0.1.3"` we don't want to lock serde - // at its old version, 0.1.1. Instead we want to allow it to update to - // `0.1.3` and update its own dependencies (like above). To do this *all - // crates from crates.io* are not locked (aka added to `avoid_locking`). - // For dependencies like `log` their previous version in the lock file will - // come up first before newer version, if newer version are available. - let mut path_deps = ws.members().cloned().collect::>(); - let mut visited = HashSet::new(); - while let Some(member) = path_deps.pop() { - if !visited.insert(member.package_id()) { - continue; - } - let is_ws_member = ws.is_member(&member); - for dep in member.dependencies() { - // If this dependency didn't match anything special then we may want - // to poison the source as it may have been added. If this path - // dependencies is **not** a workspace member, however, and it's an - // optional/non-transitive dependency then it won't be necessarily - // be in our lock file. If this shows up then we avoid poisoning - // this source as otherwise we'd repeatedly update the registry. - // - // TODO: this breaks adding an optional dependency in a - // non-workspace member and then simultaneously editing the - // dependency on that crate to enable the feature. For now, - // this bug is better than the always-updating registry though. 
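The comments above describe the poisoning step: once a package is excluded from locking, its transitive dependencies must be collected into the avoid-set as well (the role played by `add_deps` below). A standalone sketch of that walk over a toy dependency map, not cargo's `Resolve`:

```rust
use std::collections::{HashMap, HashSet};

// Sketch: recursively mark a package and everything it depends on as
// "do not lock". Cycles and shared dependencies are visited only once.
fn poison<'a>(graph: &HashMap<&'a str, Vec<&'a str>>, node: &'a str, set: &mut HashSet<&'a str>) {
    if !set.insert(node) {
        return;
    }
    for &dep in graph.get(node).into_iter().flatten() {
        poison(graph, dep, set);
    }
}

fn main() {
    let mut graph: HashMap<&str, Vec<&str>> = HashMap::new();
    graph.insert("serde", vec!["log"]);
    graph.insert("log", vec![]);
    graph.insert("unrelated", vec![]);

    // `cargo update -p serde` unlocks serde, so `log` must not stay locked either.
    let mut avoid = HashSet::new();
    poison(&graph, "serde", &mut avoid);
    assert!(avoid.contains("serde") && avoid.contains("log"));
    assert!(!avoid.contains("unrelated"));
}
```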
- if !is_ws_member && (dep.is_optional() || !dep.is_transitive()) { - continue; - } - - // If dev-dependencies aren't being resolved, skip them. - if !dep.is_transitive() && !dev_deps { - continue; - } - - // If this is a path dependency, then try to push it onto our - // worklist. - if let Some(pkg) = path_pkg(dep.source_id()) { - path_deps.push(pkg); - continue; - } - - // If we match *anything* in the dependency graph then we consider - // ourselves all ok, and assume that we'll resolve to that. - if resolve.iter().any(|id| dep.matches_ignoring_source(id)) { - continue; - } - - // Ok if nothing matches, then we poison the source of these - // dependencies and the previous lock file. - debug!( - "poisoning {} because {} looks like it changed {}", - dep.source_id(), - member.package_id(), - dep.package_name() - ); - for id in resolve - .iter() - .filter(|id| id.source_id() == dep.source_id()) - { - add_deps(resolve, id, &mut avoid_locking); - } - } - } - - // Alright now that we've got our new, fresh, shiny, and refined `keep` - // function let's put it to action. Take a look at the previous lock file, - // filter everything by this callback, and then shove everything else into - // the registry as a locked dependency. - let keep = |id: &PackageId| keep(id) && !avoid_locking.contains(id); - - registry.clear_lock(); - for node in resolve.iter().filter(keep) { - let deps = resolve - .deps_not_replaced(node) - .map(|p| p.0) - .filter(keep) - .collect::>(); - - // In the v2 lockfile format and prior the `branch=master` dependency - // directive was serialized the same way as the no-branch-listed - // directive. Nowadays in Cargo, however, these two directives are - // considered distinct and are no longer represented the same way. To - // maintain compatibility with older lock files we register locked nodes - // for *both* the master branch and the default branch. - // - // Note that this is only applicable for loading older resolves now at - // this point. All new lock files are encoded as v3-or-later, so this is - // just compat for loading an old lock file successfully. - if let Some(node) = master_branch_git_source(node, resolve) { - registry.register_lock(node, deps.clone()); - } - - registry.register_lock(node, deps); - } - - /// Recursively add `node` and all its transitive dependencies to `set`. - fn add_deps(resolve: &Resolve, node: PackageId, set: &mut HashSet) { - if !set.insert(node) { - return; - } - debug!("ignoring any lock pointing directly at {}", node); - for (dep, _) in resolve.deps_not_replaced(node) { - add_deps(resolve, dep, set); - } - } -} - -fn master_branch_git_source(id: PackageId, resolve: &Resolve) -> Option { - if resolve.version() <= ResolveVersion::V2 { - let source = id.source_id(); - if let Some(GitReference::DefaultBranch) = source.git_reference() { - let new_source = - SourceId::for_git(source.url(), GitReference::Branch("master".to_string())) - .unwrap() - .with_precise(source.precise().map(|s| s.to_string())); - return Some(id.with_source_id(new_source)); - } - } - None -} - -/// Emits warnings of unused patches case by case. -/// -/// This function does its best to provide more targeted and helpful -/// (such as showing close candidates that failed to match). However, that's -/// not terribly easy to do, so just show a general help message if we cannot. 
-fn emit_warnings_of_unused_patches( - ws: &Workspace<'_>, - resolve: &Resolve, - registry: &PackageRegistry<'_>, -) -> CargoResult<()> { - const MESSAGE: &str = "was not used in the crate graph."; - - // Patch package with the source URLs being patch - let mut patch_pkgid_to_urls = HashMap::new(); - for (url, summaries) in registry.patches().iter() { - for summary in summaries.iter() { - patch_pkgid_to_urls - .entry(summary.package_id()) - .or_insert_with(HashSet::new) - .insert(url); - } - } - - // pkg name -> all source IDs of under the same pkg name - let mut source_ids_grouped_by_pkg_name = HashMap::new(); - for pkgid in resolve.iter() { - source_ids_grouped_by_pkg_name - .entry(pkgid.name()) - .or_insert_with(HashSet::new) - .insert(pkgid.source_id()); - } - - let mut unemitted_unused_patches = Vec::new(); - for unused in resolve.unused_patches().iter() { - // Show alternative source URLs if the source URLs being patch - // cannot not be found in the crate graph. - match ( - source_ids_grouped_by_pkg_name.get(&unused.name()), - patch_pkgid_to_urls.get(unused), - ) { - (Some(ids), Some(patched_urls)) - if ids - .iter() - .all(|id| !patched_urls.contains(id.canonical_url())) => - { - use std::fmt::Write; - let mut msg = String::new(); - writeln!(&mut msg, "Patch `{}` {}", unused, MESSAGE)?; - write!( - &mut msg, - "Perhaps you misspell the source URL being patched.\n\ - Possible URLs for `[patch.]`:", - )?; - for id in ids.iter() { - write!(&mut msg, "\n {}", id.display_registry_name())?; - } - ws.config().shell().warn(msg)?; - } - _ => unemitted_unused_patches.push(unused), - } - } - - // Show general help message. - if !unemitted_unused_patches.is_empty() { - let warnings: Vec<_> = unemitted_unused_patches - .iter() - .map(|pkgid| format!("Patch `{}` {}", pkgid, MESSAGE)) - .collect(); - ws.config() - .shell() - .warn(format!("{}\n{}", warnings.join("\n"), UNUSED_PATCH_WARNING))?; - } - - return Ok(()); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/format/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/format/mod.rs deleted file mode 100644 index b746fb984..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/format/mod.rs +++ /dev/null @@ -1,135 +0,0 @@ -use std::fmt; - -use anyhow::{bail, Error}; - -use self::parse::{Parser, RawChunk}; -use super::{Graph, Node}; - -mod parse; - -enum Chunk { - Raw(String), - Package, - License, - Repository, - Features, - LibName, -} - -pub struct Pattern(Vec); - -impl Pattern { - pub fn new(format: &str) -> Result { - let mut chunks = vec![]; - - for raw in Parser::new(format) { - let chunk = match raw { - RawChunk::Text(text) => Chunk::Raw(text.to_owned()), - RawChunk::Argument("p") => Chunk::Package, - RawChunk::Argument("l") => Chunk::License, - RawChunk::Argument("r") => Chunk::Repository, - RawChunk::Argument("f") => Chunk::Features, - RawChunk::Argument("lib") => Chunk::LibName, - RawChunk::Argument(a) => { - bail!("unsupported pattern `{}`", a); - } - RawChunk::Error(err) => bail!("{}", err), - }; - chunks.push(chunk); - } - - Ok(Pattern(chunks)) - } - - pub fn display<'a>(&'a self, graph: &'a Graph<'a>, node_index: usize) -> Display<'a> { - Display { - pattern: self, - graph, - node_index, - } - } -} - -pub struct Display<'a> { - pattern: &'a Pattern, - graph: &'a Graph<'a>, - node_index: usize, -} - -impl<'a> fmt::Display for Display<'a> { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - let node = self.graph.node(self.node_index); - match node { 
- Node::Package { - package_id, - features, - .. - } => { - let package = self.graph.package_for_id(*package_id); - for chunk in &self.pattern.0 { - match chunk { - Chunk::Raw(s) => fmt.write_str(s)?, - Chunk::Package => { - let proc_macro_suffix = if package.proc_macro() { - " (proc-macro)" - } else { - "" - }; - write!( - fmt, - "{} v{}{}", - package.name(), - package.version(), - proc_macro_suffix - )?; - - let source_id = package.package_id().source_id(); - if !source_id.is_default_registry() { - write!(fmt, " ({})", source_id)?; - } - } - Chunk::License => { - if let Some(license) = &package.manifest().metadata().license { - write!(fmt, "{}", license)?; - } - } - Chunk::Repository => { - if let Some(repository) = &package.manifest().metadata().repository { - write!(fmt, "{}", repository)?; - } - } - Chunk::Features => { - write!(fmt, "{}", features.join(","))?; - } - Chunk::LibName => { - if let Some(target) = package - .manifest() - .targets() - .iter() - .find(|target| target.is_lib()) - { - write!(fmt, "{}", target.crate_name())?; - } - } - } - } - } - Node::Feature { name, node_index } => { - let for_node = self.graph.node(*node_index); - match for_node { - Node::Package { package_id, .. } => { - write!(fmt, "{} feature \"{}\"", package_id.name(), name)?; - if self.graph.is_cli_feature(self.node_index) { - write!(fmt, " (command-line)")?; - } - } - // The node_index in Node::Feature must point to a package - // node, see `add_feature`. - _ => panic!("unexpected feature node {:?}", for_node), - } - } - } - - Ok(()) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/format/parse.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/format/parse.rs deleted file mode 100644 index ee112fbee..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/format/parse.rs +++ /dev/null @@ -1,123 +0,0 @@ -//! Parser for the `--format` string for `cargo tree`. - -use std::iter; -use std::str; - -pub enum RawChunk<'a> { - /// Raw text to include in the output. - Text(&'a str), - /// A substitution to place in the output. For example, the argument "p" - /// emits the package name. - Argument(&'a str), - /// Indicates an error in the format string. The given string is a - /// human-readable message explaining the error. - Error(&'static str), -} - -/// `cargo tree` format parser. -/// -/// The format string indicates how each package should be displayed. It -/// includes simple markers surrounded in curly braces that will be -/// substituted with their corresponding values. For example, the text -/// "{p} license:{l}" will substitute the `{p}` with the package name/version -/// (and optionally source), and the `{l}` will be the license from -/// `Cargo.toml`. -/// -/// Substitutions are alphabetic characters between curly braces, like `{p}` -/// or `{foo}`. The actual interpretation of these are done in the `Pattern` -/// struct. -/// -/// Bare curly braces can be included in the output with double braces like -/// `{{` will include a single `{`, similar to Rust's format strings. 
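The doc comment above defines the `--format` mini-language for `cargo tree`: markers like `{p}` are substituted with package information, and doubled braces emit literal braces. The following is a simplified, self-contained stand-in for that behaviour (a lookup table instead of the real `Chunk` handling), not the removed `Parser` itself:

```rust
use std::collections::HashMap;

// Sketch: expand `{x}` markers from a table, treat `{{`/`}}` as escaped braces,
// and report the same kinds of errors the format parser above describes.
fn expand(format: &str, values: &HashMap<&str, &str>) -> Result<String, String> {
    let mut out = String::new();
    let mut chars = format.chars().peekable();
    while let Some(ch) = chars.next() {
        match ch {
            '{' if chars.peek() == Some(&'{') => {
                chars.next();
                out.push('{');
            }
            '}' if chars.peek() == Some(&'}') => {
                chars.next();
                out.push('}');
            }
            '{' => {
                // Collect the marker name up to the closing brace.
                let mut name = String::new();
                loop {
                    match chars.next() {
                        Some('}') => break,
                        Some(c) => name.push(c),
                        None => return Err("expected '}'".to_string()),
                    }
                }
                match values.get(name.as_str()) {
                    Some(v) => out.push_str(v),
                    None => return Err(format!("unsupported pattern `{}`", name)),
                }
            }
            '}' => return Err("unexpected '}'".to_string()),
            c => out.push(c),
        }
    }
    Ok(out)
}

fn main() {
    let mut values = HashMap::new();
    values.insert("p", "rand v0.8.5");
    values.insert("l", "MIT OR Apache-2.0");
    assert_eq!(
        expand("{p} license:{l} {{braces}}", &values).unwrap(),
        "rand v0.8.5 license:MIT OR Apache-2.0 {braces}"
    );
}
```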
-pub struct Parser<'a> { - s: &'a str, - it: iter::Peekable>, -} - -impl<'a> Parser<'a> { - pub fn new(s: &'a str) -> Parser<'a> { - Parser { - s, - it: s.char_indices().peekable(), - } - } - - fn consume(&mut self, ch: char) -> bool { - match self.it.peek() { - Some(&(_, c)) if c == ch => { - self.it.next(); - true - } - _ => false, - } - } - - fn argument(&mut self) -> RawChunk<'a> { - RawChunk::Argument(self.name()) - } - - fn name(&mut self) -> &'a str { - let start = match self.it.peek() { - Some(&(pos, ch)) if ch.is_alphabetic() => { - self.it.next(); - pos - } - _ => return "", - }; - - loop { - match self.it.peek() { - Some(&(_, ch)) if ch.is_alphanumeric() => { - self.it.next(); - } - Some(&(end, _)) => return &self.s[start..end], - None => return &self.s[start..], - } - } - } - - fn text(&mut self, start: usize) -> RawChunk<'a> { - while let Some(&(pos, ch)) = self.it.peek() { - match ch { - '{' | '}' => return RawChunk::Text(&self.s[start..pos]), - _ => { - self.it.next(); - } - } - } - RawChunk::Text(&self.s[start..]) - } -} - -impl<'a> Iterator for Parser<'a> { - type Item = RawChunk<'a>; - - fn next(&mut self) -> Option> { - match self.it.peek() { - Some(&(_, '{')) => { - self.it.next(); - if self.consume('{') { - Some(RawChunk::Text("{")) - } else { - let chunk = self.argument(); - if self.consume('}') { - Some(chunk) - } else { - for _ in &mut self.it {} - Some(RawChunk::Error("expected '}'")) - } - } - } - Some(&(_, '}')) => { - self.it.next(); - if self.consume('}') { - Some(RawChunk::Text("}")) - } else { - Some(RawChunk::Error("unexpected '}'")) - } - } - Some(&(i, _)) => Some(self.text(i)), - None => None, - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/graph.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/graph.rs deleted file mode 100644 index 283382949..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/graph.rs +++ /dev/null @@ -1,660 +0,0 @@ -//! Code for building the graph used by `cargo tree`. - -use super::TreeOptions; -use crate::core::compiler::{CompileKind, RustcTargetData}; -use crate::core::dependency::DepKind; -use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures}; -use crate::core::resolver::Resolve; -use crate::core::{FeatureMap, FeatureValue, Package, PackageId, PackageIdSpec, Workspace}; -use crate::util::interning::InternedString; -use crate::util::CargoResult; -use std::collections::{HashMap, HashSet}; - -#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)] -pub enum Node { - Package { - package_id: PackageId, - /// Features that are enabled on this package. - features: Vec, - kind: CompileKind, - }, - Feature { - /// Index of the package node this feature is for. - node_index: usize, - /// Name of the feature. - name: InternedString, - }, -} - -/// The kind of edge, for separating dependencies into different sections. -#[derive(Debug, Copy, Hash, Eq, Clone, PartialEq)] -pub enum EdgeKind { - Dep(DepKind), - Feature, -} - -/// Set of outgoing edges for a single node. -/// -/// Edges are separated by the edge kind (`DepKind` or `Feature`). This is -/// primarily done so that the output can easily display separate sections -/// like `[build-dependencies]`. -/// -/// The value is a `Vec` because each edge kind can have multiple outgoing -/// edges. For example, package "foo" can have multiple normal dependencies. 
-#[derive(Clone)] -struct Edges(HashMap>); - -impl Edges { - fn new() -> Edges { - Edges(HashMap::new()) - } - - /// Adds an edge pointing to the given node. - fn add_edge(&mut self, kind: EdgeKind, index: usize) { - let indexes = self.0.entry(kind).or_default(); - if !indexes.contains(&index) { - indexes.push(index) - } - } -} - -/// A graph of dependencies. -pub struct Graph<'a> { - nodes: Vec, - /// The indexes of `edges` correspond to the `nodes`. That is, `edges[0]` - /// is the set of outgoing edges for `nodes[0]`. They should always be in - /// sync. - edges: Vec, - /// Index maps a node to an index, for fast lookup. - index: HashMap, - /// Map for looking up packages. - package_map: HashMap, - /// Set of indexes of feature nodes that were added via the command-line. - /// - /// For example `--features foo` will mark the "foo" node here. - cli_features: HashSet, - /// Map of dependency names, used for building internal feature map for - /// dep_name/feat_name syntax. - /// - /// Key is the index of a package node, value is a map of dep_name to a - /// set of `(pkg_node_index, is_optional)`. - dep_name_map: HashMap>>, -} - -impl<'a> Graph<'a> { - fn new(package_map: HashMap) -> Graph<'a> { - Graph { - nodes: Vec::new(), - edges: Vec::new(), - index: HashMap::new(), - package_map, - cli_features: HashSet::new(), - dep_name_map: HashMap::new(), - } - } - - /// Adds a new node to the graph, returning its new index. - fn add_node(&mut self, node: Node) -> usize { - let from_index = self.nodes.len(); - self.nodes.push(node); - self.edges.push(Edges::new()); - self.index - .insert(self.nodes[from_index].clone(), from_index); - from_index - } - - /// Returns a list of nodes the given node index points to for the given kind. - pub fn connected_nodes(&self, from: usize, kind: &EdgeKind) -> Vec { - match self.edges[from].0.get(kind) { - Some(indexes) => { - // Created a sorted list for consistent output. - let mut indexes = indexes.clone(); - indexes.sort_unstable_by(|a, b| self.nodes[*a].cmp(&self.nodes[*b])); - indexes - } - None => Vec::new(), - } - } - - /// Returns `true` if the given node has any outgoing edges. - pub fn has_outgoing_edges(&self, index: usize) -> bool { - !self.edges[index].0.is_empty() - } - - /// Gets a node by index. - pub fn node(&self, index: usize) -> &Node { - &self.nodes[index] - } - - /// Given a slice of PackageIds, returns the indexes of all nodes that match. - pub fn indexes_from_ids(&self, package_ids: &[PackageId]) -> Vec { - let mut result: Vec<(&Node, usize)> = self - .nodes - .iter() - .enumerate() - .filter(|(_i, node)| match node { - Node::Package { package_id, .. } => package_ids.contains(package_id), - _ => false, - }) - .map(|(i, node)| (node, i)) - .collect(); - // Sort for consistent output (the same command should always return - // the same output). "unstable" since nodes should always be unique. - result.sort_unstable(); - result.into_iter().map(|(_node, i)| i).collect() - } - - pub fn package_for_id(&self, id: PackageId) -> &Package { - self.package_map[&id] - } - - fn package_id_for_index(&self, index: usize) -> PackageId { - match self.nodes[index] { - Node::Package { package_id, .. } => package_id, - Node::Feature { .. } => panic!("unexpected feature node"), - } - } - - /// Returns `true` if the given feature node index is a feature enabled - /// via the command-line. 
- pub fn is_cli_feature(&self, index: usize) -> bool { - self.cli_features.contains(&index) - } - - /// Returns a new graph by removing all nodes not reachable from the - /// given nodes. - pub fn from_reachable(&self, roots: &[usize]) -> Graph<'a> { - // Graph built with features does not (yet) support --duplicates. - assert!(self.dep_name_map.is_empty()); - let mut new_graph = Graph::new(self.package_map.clone()); - // Maps old index to new index. None if not yet visited. - let mut remap: Vec> = vec![None; self.nodes.len()]; - - fn visit( - graph: &Graph<'_>, - new_graph: &mut Graph<'_>, - remap: &mut Vec>, - index: usize, - ) -> usize { - if let Some(new_index) = remap[index] { - // Already visited. - return new_index; - } - let node = graph.node(index).clone(); - let new_from = new_graph.add_node(node); - remap[index] = Some(new_from); - // Visit dependencies. - for (edge_kind, edge_indexes) in &graph.edges[index].0 { - for edge_index in edge_indexes { - let new_to_index = visit(graph, new_graph, remap, *edge_index); - new_graph.edges[new_from].add_edge(*edge_kind, new_to_index); - } - } - new_from - } - - // Walk the roots, generating a new graph as it goes along. - for root in roots { - visit(self, &mut new_graph, &mut remap, *root); - } - - new_graph - } - - /// Inverts the direction of all edges. - pub fn invert(&mut self) { - let mut new_edges = vec![Edges::new(); self.edges.len()]; - for (from_idx, node_edges) in self.edges.iter().enumerate() { - for (kind, edges) in &node_edges.0 { - for edge_idx in edges { - new_edges[*edge_idx].add_edge(*kind, from_idx); - } - } - } - self.edges = new_edges; - } - - /// Returns a list of nodes that are considered "duplicates" (same package - /// name, with different versions/features/source/etc.). - pub fn find_duplicates(&self) -> Vec { - // Graph built with features does not (yet) support --duplicates. - assert!(self.dep_name_map.is_empty()); - - // Collect a map of package name to Vec<(&Node, usize)>. - let mut packages = HashMap::new(); - for (i, node) in self.nodes.iter().enumerate() { - if let Node::Package { package_id, .. } = node { - packages - .entry(package_id.name()) - .or_insert_with(Vec::new) - .push((node, i)); - } - } - - let mut dupes: Vec<(&Node, usize)> = packages - .into_iter() - .filter(|(_name, indexes)| indexes.len() > 1) - .flat_map(|(_name, indexes)| indexes) - .collect(); - // For consistent output. - dupes.sort_unstable(); - dupes.into_iter().map(|(_node, i)| i).collect() - } -} - -/// Builds the graph. 
-pub fn build<'a>( - ws: &Workspace<'_>, - resolve: &Resolve, - resolved_features: &ResolvedFeatures, - specs: &[PackageIdSpec], - cli_features: &CliFeatures, - target_data: &RustcTargetData<'_>, - requested_kinds: &[CompileKind], - package_map: HashMap, - opts: &TreeOptions, -) -> CargoResult> { - let mut graph = Graph::new(package_map); - let mut members_with_features = ws.members_with_features(specs, cli_features)?; - members_with_features.sort_unstable_by_key(|e| e.0.package_id()); - for (member, cli_features) in members_with_features { - let member_id = member.package_id(); - let features_for = FeaturesFor::from_for_host(member.proc_macro()); - for kind in requested_kinds { - let member_index = add_pkg( - &mut graph, - resolve, - resolved_features, - member_id, - features_for, - target_data, - *kind, - opts, - ); - if opts.graph_features { - let fmap = resolve.summary(member_id).features(); - add_cli_features(&mut graph, member_index, &cli_features, fmap); - } - } - } - if opts.graph_features { - add_internal_features(&mut graph, resolve); - } - Ok(graph) -} - -/// Adds a single package node (if it does not already exist). -/// -/// This will also recursively add all of its dependencies. -/// -/// Returns the index to the package node. -fn add_pkg( - graph: &mut Graph<'_>, - resolve: &Resolve, - resolved_features: &ResolvedFeatures, - package_id: PackageId, - features_for: FeaturesFor, - target_data: &RustcTargetData<'_>, - requested_kind: CompileKind, - opts: &TreeOptions, -) -> usize { - let node_features = resolved_features.activated_features(package_id, features_for); - let node_kind = match features_for { - FeaturesFor::HostDep => CompileKind::Host, - FeaturesFor::NormalOrDev => requested_kind, - }; - let node = Node::Package { - package_id, - features: node_features, - kind: node_kind, - }; - if let Some(idx) = graph.index.get(&node) { - return *idx; - } - let from_index = graph.add_node(node); - // Compute the dep name map which is later used for foo/bar feature lookups. - let mut dep_name_map: HashMap> = HashMap::new(); - let mut deps: Vec<_> = resolve.deps(package_id).collect(); - deps.sort_unstable_by_key(|(dep_id, _)| *dep_id); - let show_all_targets = opts.target == super::Target::All; - for (dep_id, deps) in deps { - let mut deps: Vec<_> = deps - .iter() - // This filter is *similar* to the one found in `unit_dependencies::compute_deps`. - // Try to keep them in sync! - .filter(|dep| { - let kind = match (node_kind, dep.kind()) { - (CompileKind::Host, _) => CompileKind::Host, - (_, DepKind::Build) => CompileKind::Host, - (_, DepKind::Normal) => node_kind, - (_, DepKind::Development) => node_kind, - }; - // Filter out inactivated targets. - if !show_all_targets && !target_data.dep_platform_activated(dep, kind) { - return false; - } - // Filter out dev-dependencies if requested. - if !opts.edge_kinds.contains(&EdgeKind::Dep(dep.kind())) { - return false; - } - if dep.is_optional() { - // If the new feature resolver does not enable this - // optional dep, then don't use it. - if !resolved_features.is_dep_activated( - package_id, - features_for, - dep.name_in_toml(), - ) { - return false; - } - } - true - }) - .collect(); - - // This dependency is eliminated from the dependency tree under - // the current target and feature set. 
- if deps.is_empty() { - continue; - } - - deps.sort_unstable_by_key(|dep| dep.name_in_toml()); - let dep_pkg = graph.package_map[&dep_id]; - - for dep in deps { - let dep_features_for = if dep.is_build() || dep_pkg.proc_macro() { - FeaturesFor::HostDep - } else { - features_for - }; - let dep_index = add_pkg( - graph, - resolve, - resolved_features, - dep_id, - dep_features_for, - target_data, - requested_kind, - opts, - ); - if opts.graph_features { - // Add the dependency node with feature nodes in-between. - dep_name_map - .entry(dep.name_in_toml()) - .or_default() - .insert((dep_index, dep.is_optional())); - if dep.uses_default_features() { - add_feature( - graph, - InternedString::new("default"), - Some(from_index), - dep_index, - EdgeKind::Dep(dep.kind()), - ); - } - for feature in dep.features().iter() { - add_feature( - graph, - *feature, - Some(from_index), - dep_index, - EdgeKind::Dep(dep.kind()), - ); - } - if !dep.uses_default_features() && dep.features().is_empty() { - // No features, use a direct connection. - graph.edges[from_index].add_edge(EdgeKind::Dep(dep.kind()), dep_index); - } - } else { - graph.edges[from_index].add_edge(EdgeKind::Dep(dep.kind()), dep_index); - } - } - } - if opts.graph_features { - assert!(graph - .dep_name_map - .insert(from_index, dep_name_map) - .is_none()); - } - - from_index -} - -/// Adds a feature node between two nodes. -/// -/// That is, it adds the following: -/// -/// ```text -/// from -Edge-> featname -Edge::Feature-> to -/// ``` -/// -/// Returns a tuple `(missing, index)`. -/// `missing` is true if this feature edge was already added. -/// `index` is the index of the index in the graph of the `Feature` node. -fn add_feature( - graph: &mut Graph<'_>, - name: InternedString, - from: Option, - to: usize, - kind: EdgeKind, -) -> (bool, usize) { - // `to` *must* point to a package node. - assert!(matches! {graph.nodes[to], Node::Package{..}}); - let node = Node::Feature { - node_index: to, - name, - }; - let (missing, node_index) = match graph.index.get(&node) { - Some(idx) => (false, *idx), - None => (true, graph.add_node(node)), - }; - if let Some(from) = from { - graph.edges[from].add_edge(kind, node_index); - } - graph.edges[node_index].add_edge(EdgeKind::Feature, to); - (missing, node_index) -} - -/// Adds nodes for features requested on the command-line for the given member. -/// -/// Feature nodes are added as "roots" (i.e., they have no "from" index), -/// because they come from the outside world. They usually only appear with -/// `--invert`. -fn add_cli_features( - graph: &mut Graph<'_>, - package_index: usize, - cli_features: &CliFeatures, - feature_map: &FeatureMap, -) { - // NOTE: Recursive enabling of features will be handled by - // add_internal_features. - - // Create a set of feature names requested on the command-line. - let mut to_add: HashSet = HashSet::new(); - if cli_features.all_features { - to_add.extend(feature_map.keys().map(|feat| FeatureValue::Feature(*feat))); - } else { - if cli_features.uses_default_features { - to_add.insert(FeatureValue::Feature(InternedString::new("default"))); - } - to_add.extend(cli_features.features.iter().cloned()); - }; - - // Add each feature as a node, and mark as "from command-line" in graph.cli_features. - for fv in to_add { - match fv { - FeatureValue::Feature(feature) => { - let index = add_feature(graph, feature, None, package_index, EdgeKind::Feature).1; - graph.cli_features.insert(index); - } - // This is enforced by CliFeatures. - FeatureValue::Dep { .. 
} => panic!("unexpected cli dep feature {}", fv), - FeatureValue::DepFeature { - dep_name, - dep_feature, - weak, - } => { - let dep_connections = match graph.dep_name_map[&package_index].get(&dep_name) { - // Clone to deal with immutable borrow of `graph`. :( - Some(dep_connections) => dep_connections.clone(), - None => { - // --features bar?/feat where `bar` is not activated should be ignored. - // If this wasn't weak, then this is a bug. - if weak { - continue; - } - panic!( - "missing dep graph connection for CLI feature `{}` for member {:?}\n\ - Please file a bug report at https://github.com/rust-lang/cargo/issues", - fv, - graph.nodes.get(package_index) - ); - } - }; - for (dep_index, is_optional) in dep_connections { - if is_optional { - // Activate the optional dep on self. - let index = - add_feature(graph, dep_name, None, package_index, EdgeKind::Feature).1; - graph.cli_features.insert(index); - } - let index = - add_feature(graph, dep_feature, None, dep_index, EdgeKind::Feature).1; - graph.cli_features.insert(index); - } - } - } - } -} - -/// Recursively adds connections between features in the `[features]` table -/// for every package. -fn add_internal_features(graph: &mut Graph<'_>, resolve: &Resolve) { - // Collect features already activated by dependencies or command-line. - let feature_nodes: Vec<(PackageId, usize, usize, InternedString)> = graph - .nodes - .iter() - .enumerate() - .filter_map(|(i, node)| match node { - Node::Package { .. } => None, - Node::Feature { node_index, name } => { - let package_id = graph.package_id_for_index(*node_index); - Some((package_id, *node_index, i, *name)) - } - }) - .collect(); - - for (package_id, package_index, feature_index, feature_name) in feature_nodes { - add_feature_rec( - graph, - resolve, - feature_name, - package_id, - feature_index, - package_index, - ); - } -} - -/// Recursively add feature nodes for all features enabled by the given feature. -/// -/// `from` is the index of the node that enables this feature. -/// `package_index` is the index of the package node for the feature. -fn add_feature_rec( - graph: &mut Graph<'_>, - resolve: &Resolve, - feature_name: InternedString, - package_id: PackageId, - from: usize, - package_index: usize, -) { - let feature_map = resolve.summary(package_id).features(); - let fvs = match feature_map.get(&feature_name) { - Some(fvs) => fvs, - None => return, - }; - for fv in fvs { - match fv { - FeatureValue::Feature(dep_name) => { - let (missing, feat_index) = add_feature( - graph, - *dep_name, - Some(from), - package_index, - EdgeKind::Feature, - ); - // Don't recursive if the edge already exists to deal with cycles. - if missing { - add_feature_rec( - graph, - resolve, - *dep_name, - package_id, - feat_index, - package_index, - ); - } - } - // Dependencies are already shown in the graph as dep edges. I'm - // uncertain whether or not this might be confusing in some cases - // (like feature `"somefeat" = ["dep:somedep"]`), so maybe in the - // future consider explicitly showing this? - FeatureValue::Dep { .. } => {} - FeatureValue::DepFeature { - dep_name, - dep_feature, - // Note: `weak` is mostly handled when the graph is built in - // `is_dep_activated` which is responsible for skipping - // unactivated weak dependencies. Here it is only used to - // determine if the feature of the dependency name is - // activated on self. 
- weak, - } => { - let dep_indexes = match graph.dep_name_map[&package_index].get(dep_name) { - Some(indexes) => indexes.clone(), - None => { - log::debug!( - "enabling feature {} on {}, found {}/{}, \ - dep appears to not be enabled", - feature_name, - package_id, - dep_name, - dep_feature - ); - continue; - } - }; - for (dep_index, is_optional) in dep_indexes { - let dep_pkg_id = graph.package_id_for_index(dep_index); - if is_optional && !weak { - // Activate the optional dep on self. - add_feature( - graph, - *dep_name, - Some(from), - package_index, - EdgeKind::Feature, - ); - } - let (missing, feat_index) = add_feature( - graph, - *dep_feature, - Some(from), - dep_index, - EdgeKind::Feature, - ); - if missing { - add_feature_rec( - graph, - resolve, - *dep_feature, - dep_pkg_id, - feat_index, - dep_index, - ); - } - } - } - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/mod.rs deleted file mode 100644 index 4959e04e5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/tree/mod.rs +++ /dev/null @@ -1,443 +0,0 @@ -//! Implementation of `cargo tree`. - -use self::format::Pattern; -use crate::core::compiler::{CompileKind, RustcTargetData}; -use crate::core::dependency::DepKind; -use crate::core::resolver::{features::CliFeatures, ForceAllTargets, HasDevUnits}; -use crate::core::{Package, PackageId, PackageIdSpec, Workspace}; -use crate::ops::{self, Packages}; -use crate::util::{CargoResult, Config}; -use crate::{drop_print, drop_println}; -use anyhow::Context; -use graph::Graph; -use std::collections::{HashMap, HashSet}; -use std::str::FromStr; - -mod format; -mod graph; - -pub use {graph::EdgeKind, graph::Node}; - -pub struct TreeOptions { - pub cli_features: CliFeatures, - /// The packages to display the tree for. - pub packages: Packages, - /// The platform to filter for. - pub target: Target, - /// The dependency kinds to display. - pub edge_kinds: HashSet, - pub invert: Vec, - /// The packages to prune from the display of the dependency tree. - pub pkgs_to_prune: Vec, - /// The style of prefix for each line. - pub prefix: Prefix, - /// If `true`, duplicates will be repeated. - /// If `false`, duplicates will be marked with `*`, and their dependencies - /// won't be shown. - pub no_dedupe: bool, - /// If `true`, run in a special mode where it will scan for packages that - /// appear with different versions, and report if any where found. Implies - /// `invert`. - pub duplicates: bool, - /// The style of characters to use. - pub charset: Charset, - /// A format string indicating how each package should be displayed. - pub format: String, - /// Includes features in the tree as separate nodes. - pub graph_features: bool, - /// Maximum display depth of the dependency tree. - pub max_display_depth: u32, - /// Exculdes proc-macro dependencies. 
- pub no_proc_macro: bool, -} - -#[derive(PartialEq)] -pub enum Target { - Host, - Specific(Vec), - All, -} - -impl Target { - pub fn from_cli(targets: Vec) -> Target { - match targets.len() { - 0 => Target::Host, - 1 if targets[0] == "all" => Target::All, - _ => Target::Specific(targets), - } - } -} - -pub enum Charset { - Utf8, - Ascii, -} - -impl FromStr for Charset { - type Err = &'static str; - - fn from_str(s: &str) -> Result { - match s { - "utf8" => Ok(Charset::Utf8), - "ascii" => Ok(Charset::Ascii), - _ => Err("invalid charset"), - } - } -} - -#[derive(Clone, Copy)] -pub enum Prefix { - None, - Indent, - Depth, -} - -impl FromStr for Prefix { - type Err = &'static str; - - fn from_str(s: &str) -> Result { - match s { - "none" => Ok(Prefix::None), - "indent" => Ok(Prefix::Indent), - "depth" => Ok(Prefix::Depth), - _ => Err("invalid prefix"), - } - } -} - -struct Symbols { - down: &'static str, - tee: &'static str, - ell: &'static str, - right: &'static str, -} - -static UTF8_SYMBOLS: Symbols = Symbols { - down: "โ”‚", - tee: "โ”œ", - ell: "โ””", - right: "โ”€", -}; - -static ASCII_SYMBOLS: Symbols = Symbols { - down: "|", - tee: "|", - ell: "`", - right: "-", -}; - -/// Entry point for the `cargo tree` command. -pub fn build_and_print(ws: &Workspace<'_>, opts: &TreeOptions) -> CargoResult<()> { - let requested_targets = match &opts.target { - Target::All | Target::Host => Vec::new(), - Target::Specific(t) => t.clone(), - }; - // TODO: Target::All is broken with -Zfeatures=itarget. To handle that properly, - // `FeatureResolver` will need to be taught what "all" means. - let requested_kinds = CompileKind::from_requested_targets(ws.config(), &requested_targets)?; - let target_data = RustcTargetData::new(ws, &requested_kinds)?; - let specs = opts.packages.to_package_id_specs(ws)?; - let has_dev = if opts - .edge_kinds - .contains(&EdgeKind::Dep(DepKind::Development)) - { - HasDevUnits::Yes - } else { - HasDevUnits::No - }; - let force_all = if opts.target == Target::All { - ForceAllTargets::Yes - } else { - ForceAllTargets::No - }; - let ws_resolve = ops::resolve_ws_with_opts( - ws, - &target_data, - &requested_kinds, - &opts.cli_features, - &specs, - has_dev, - force_all, - )?; - - let package_map: HashMap = ws_resolve - .pkg_set - .packages() - .map(|pkg| (pkg.package_id(), pkg)) - .collect(); - - let mut graph = graph::build( - ws, - &ws_resolve.targeted_resolve, - &ws_resolve.resolved_features, - &specs, - &opts.cli_features, - &target_data, - &requested_kinds, - package_map, - opts, - )?; - - let root_specs = if opts.invert.is_empty() { - specs - } else { - opts.invert - .iter() - .map(|p| PackageIdSpec::parse(p)) - .collect::>>()? - }; - let root_ids = ws_resolve.targeted_resolve.specs_to_ids(&root_specs)?; - let root_indexes = graph.indexes_from_ids(&root_ids); - - let root_indexes = if opts.duplicates { - // `-d -p foo` will only show duplicates within foo's subtree - graph = graph.from_reachable(root_indexes.as_slice()); - graph.find_duplicates() - } else { - root_indexes - }; - - if !opts.invert.is_empty() || opts.duplicates { - graph.invert(); - } - - // Packages to prune. - let pkgs_to_prune = opts - .pkgs_to_prune - .iter() - .map(|p| PackageIdSpec::parse(p)) - .map(|r| { - // Provide an error message if pkgid is not within the resolved - // dependencies graph. 
- r.and_then(|spec| spec.query(ws_resolve.targeted_resolve.iter()).and(Ok(spec))) - }) - .collect::>>()?; - - print(ws.config(), opts, root_indexes, &pkgs_to_prune, &graph)?; - Ok(()) -} - -/// Prints a tree for each given root. -fn print( - config: &Config, - opts: &TreeOptions, - roots: Vec, - pkgs_to_prune: &[PackageIdSpec], - graph: &Graph<'_>, -) -> CargoResult<()> { - let format = Pattern::new(&opts.format) - .with_context(|| format!("tree format `{}` not valid", opts.format))?; - - let symbols = match opts.charset { - Charset::Utf8 => &UTF8_SYMBOLS, - Charset::Ascii => &ASCII_SYMBOLS, - }; - - // The visited deps is used to display a (*) whenever a dep has - // already been printed (ignored with --no-dedupe). - let mut visited_deps = HashSet::new(); - - for (i, root_index) in roots.into_iter().enumerate() { - if i != 0 { - drop_println!(config); - } - - // A stack of bools used to determine where | symbols should appear - // when printing a line. - let mut levels_continue = vec![]; - // The print stack is used to detect dependency cycles when - // --no-dedupe is used. It contains a Node for each level. - let mut print_stack = vec![]; - - print_node( - config, - graph, - root_index, - &format, - symbols, - pkgs_to_prune, - opts.prefix, - opts.no_dedupe, - opts.max_display_depth, - opts.no_proc_macro, - &mut visited_deps, - &mut levels_continue, - &mut print_stack, - ); - } - - Ok(()) -} - -/// Prints a package and all of its dependencies. -fn print_node<'a>( - config: &Config, - graph: &'a Graph<'_>, - node_index: usize, - format: &Pattern, - symbols: &Symbols, - pkgs_to_prune: &[PackageIdSpec], - prefix: Prefix, - no_dedupe: bool, - max_display_depth: u32, - no_proc_macro: bool, - visited_deps: &mut HashSet, - levels_continue: &mut Vec, - print_stack: &mut Vec, -) { - let new = no_dedupe || visited_deps.insert(node_index); - - match prefix { - Prefix::Depth => drop_print!(config, "{}", levels_continue.len()), - Prefix::Indent => { - if let Some((last_continues, rest)) = levels_continue.split_last() { - for continues in rest { - let c = if *continues { symbols.down } else { " " }; - drop_print!(config, "{} ", c); - } - - let c = if *last_continues { - symbols.tee - } else { - symbols.ell - }; - drop_print!(config, "{0}{1}{1} ", c, symbols.right); - } - } - Prefix::None => {} - } - - let in_cycle = print_stack.contains(&node_index); - // If this node does not have any outgoing edges, don't include the (*) - // since there isn't really anything "deduplicated", and it generally just - // adds noise. - let has_deps = graph.has_outgoing_edges(node_index); - let star = if (new && !in_cycle) || !has_deps { - "" - } else { - " (*)" - }; - drop_println!(config, "{}{}", format.display(graph, node_index), star); - - if !new || in_cycle { - return; - } - print_stack.push(node_index); - - for kind in &[ - EdgeKind::Dep(DepKind::Normal), - EdgeKind::Dep(DepKind::Build), - EdgeKind::Dep(DepKind::Development), - EdgeKind::Feature, - ] { - print_dependencies( - config, - graph, - node_index, - format, - symbols, - pkgs_to_prune, - prefix, - no_dedupe, - max_display_depth, - no_proc_macro, - visited_deps, - levels_continue, - print_stack, - kind, - ); - } - print_stack.pop(); -} - -/// Prints all the dependencies of a package for the given dependency kind. 
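
As an aside, the prefix handling in `print_node` above (the `levels_continue` stack combined with the `Symbols` table) reduces to a few lines. Here is a standalone sketch with a hypothetical `prefix` helper, hard-coded UTF-8 symbols, and made-up package names.

```rust
/// Minimal illustration of the prefix logic sketched above: every ancestor
/// level contributes a vertical bar while it still has siblings to print,
/// and the final level picks a tee or an ell.
fn prefix(levels_continue: &[bool]) -> String {
    let mut out = String::new();
    if let Some((last, rest)) = levels_continue.split_last() {
        for continues in rest {
            out.push_str(if *continues { "│   " } else { "    " });
        }
        out.push_str(if *last { "├── " } else { "└── " });
    }
    out
}

fn main() {
    // A root with two children, where the first child has one child of its own.
    println!("{}example v0.1.0", prefix(&[]));
    println!("{}dep-a v1.0.0", prefix(&[true]));
    println!("{}dep-a-inner v0.2.0", prefix(&[true, false]));
    println!("{}dep-b v2.0.0", prefix(&[false]));
}
```

Each ancestor level contributes a `│` only while it still has siblings left to print, which is what pushing `it.peek().is_some()` onto `levels_continue` achieves in `print_dependencies` below.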
-fn print_dependencies<'a>( - config: &Config, - graph: &'a Graph<'_>, - node_index: usize, - format: &Pattern, - symbols: &Symbols, - pkgs_to_prune: &[PackageIdSpec], - prefix: Prefix, - no_dedupe: bool, - max_display_depth: u32, - no_proc_macro: bool, - visited_deps: &mut HashSet, - levels_continue: &mut Vec, - print_stack: &mut Vec, - kind: &EdgeKind, -) { - let deps = graph.connected_nodes(node_index, kind); - if deps.is_empty() { - return; - } - - let name = match kind { - EdgeKind::Dep(DepKind::Normal) => None, - EdgeKind::Dep(DepKind::Build) => Some("[build-dependencies]"), - EdgeKind::Dep(DepKind::Development) => Some("[dev-dependencies]"), - EdgeKind::Feature => None, - }; - - if let Prefix::Indent = prefix { - if let Some(name) = name { - for continues in &**levels_continue { - let c = if *continues { symbols.down } else { " " }; - drop_print!(config, "{} ", c); - } - - drop_println!(config, "{}", name); - } - } - - // Current level exceeds maximum display depth. Skip. - if levels_continue.len() + 1 > max_display_depth as usize { - return; - } - - let mut it = deps - .iter() - .filter(|dep| { - // Filter out proc-macro dependencies. - if no_proc_macro { - match graph.node(**dep) { - &Node::Package { package_id, .. } => { - !graph.package_for_id(package_id).proc_macro() - } - _ => true, - } - } else { - true - } - }) - .filter(|dep| { - // Filter out packages to prune. - match graph.node(**dep) { - Node::Package { package_id, .. } => { - !pkgs_to_prune.iter().any(|spec| spec.matches(*package_id)) - } - _ => true, - } - }) - .peekable(); - - while let Some(dependency) = it.next() { - levels_continue.push(it.peek().is_some()); - print_node( - config, - graph, - *dependency, - format, - symbols, - pkgs_to_prune, - prefix, - no_dedupe, - max_display_depth, - no_proc_macro, - visited_deps, - levels_continue, - print_stack, - ); - levels_continue.pop(); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/vendor.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/vendor.rs deleted file mode 100644 index 8ecb802d7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/ops/vendor.rs +++ /dev/null @@ -1,384 +0,0 @@ -use crate::core::shell::Verbosity; -use crate::core::{GitReference, Workspace}; -use crate::ops; -use crate::sources::path::PathSource; -use crate::sources::CRATES_IO_REGISTRY; -use crate::util::{CargoResult, Config}; -use anyhow::{bail, Context as _}; -use cargo_util::{paths, Sha256}; -use serde::Serialize; -use std::collections::HashSet; -use std::collections::{BTreeMap, BTreeSet, HashMap}; -use std::fs::{self, File, OpenOptions}; -use std::io::{Read, Write}; -use std::path::{Path, PathBuf}; - -pub struct VendorOptions<'a> { - pub no_delete: bool, - pub versioned_dirs: bool, - pub destination: &'a Path, - pub extra: Vec, -} - -pub fn vendor(ws: &Workspace<'_>, opts: &VendorOptions<'_>) -> CargoResult<()> { - let config = ws.config(); - let mut extra_workspaces = Vec::new(); - for extra in opts.extra.iter() { - let extra = config.cwd().join(extra); - let ws = Workspace::new(&extra, config)?; - extra_workspaces.push(ws); - } - let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::>(); - let vendor_config = sync(config, &workspaces, opts).with_context(|| "failed to sync")?; - - if config.shell().verbosity() != Verbosity::Quiet { - if vendor_config.source.is_empty() { - crate::drop_eprintln!(config, "There is no dependency to vendor in this project."); - } else { - crate::drop_eprint!( - config, - "To use vendored sources, 
add this to your .cargo/config.toml for this project:\n\n" - ); - crate::drop_print!(config, "{}", &toml::to_string(&vendor_config).unwrap()); - } - } - - Ok(()) -} - -#[derive(Serialize)] -struct VendorConfig { - source: BTreeMap, -} - -#[derive(Serialize)] -#[serde(rename_all = "lowercase", untagged)] -enum VendorSource { - Directory { - directory: PathBuf, - }, - Registry { - registry: Option, - #[serde(rename = "replace-with")] - replace_with: String, - }, - Git { - git: String, - branch: Option, - tag: Option, - rev: Option, - #[serde(rename = "replace-with")] - replace_with: String, - }, -} - -fn sync( - config: &Config, - workspaces: &[&Workspace<'_>], - opts: &VendorOptions<'_>, -) -> CargoResult { - let canonical_destination = opts.destination.canonicalize(); - let canonical_destination = canonical_destination.as_deref().unwrap_or(opts.destination); - let dest_dir_already_exists = canonical_destination.exists(); - - paths::create_dir_all(&canonical_destination)?; - let mut to_remove = HashSet::new(); - if !opts.no_delete { - for entry in canonical_destination.read_dir()? { - let entry = entry?; - if !entry - .file_name() - .to_str() - .map_or(false, |s| s.starts_with('.')) - { - to_remove.insert(entry.path()); - } - } - } - - // First up attempt to work around rust-lang/cargo#5956. Apparently build - // artifacts sprout up in Cargo's global cache for whatever reason, although - // it's unsure what tool is causing these issues at this time. For now we - // apply a heavy-hammer approach which is to delete Cargo's unpacked version - // of each crate to start off with. After we do this we'll re-resolve and - // redownload again, which should trigger Cargo to re-extract all the - // crates. - // - // Note that errors are largely ignored here as this is a best-effort - // attempt. If anything fails here we basically just move on to the next - // crate to work with. - for ws in workspaces { - let (packages, resolve) = - ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?; - - packages - .get_many(resolve.iter()) - .with_context(|| "failed to download packages")?; - - for pkg in resolve.iter() { - // Don't delete actual source code! - if pkg.source_id().is_path() { - if let Ok(path) = pkg.source_id().url().to_file_path() { - if let Ok(path) = path.canonicalize() { - to_remove.remove(&path); - } - } - continue; - } - if pkg.source_id().is_git() { - continue; - } - if let Ok(pkg) = packages.get_one(pkg) { - drop(fs::remove_dir_all(pkg.manifest_path().parent().unwrap())); - } - } - } - - let mut checksums = HashMap::new(); - let mut ids = BTreeMap::new(); - - // Next up let's actually download all crates and start storing internal - // tables about them. - for ws in workspaces { - let (packages, resolve) = - ops::resolve_ws(ws).with_context(|| "failed to load pkg lockfile")?; - - packages - .get_many(resolve.iter()) - .with_context(|| "failed to download packages")?; - - for pkg in resolve.iter() { - // No need to vendor path crates since they're already in the - // repository - if pkg.source_id().is_path() { - continue; - } - ids.insert( - pkg, - packages - .get_one(pkg) - .with_context(|| "failed to fetch package")? 
- .clone(), - ); - - checksums.insert(pkg, resolve.checksums().get(&pkg).cloned()); - } - } - - let mut versions = HashMap::new(); - for id in ids.keys() { - let map = versions.entry(id.name()).or_insert_with(BTreeMap::default); - if let Some(prev) = map.get(&id.version()) { - bail!( - "found duplicate version of package `{} v{}` \ - vendored from two sources:\n\ - \n\ - \tsource 1: {}\n\ - \tsource 2: {}", - id.name(), - id.version(), - prev, - id.source_id() - ); - } - map.insert(id.version(), id.source_id()); - } - - let mut sources = BTreeSet::new(); - let mut tmp_buf = [0; 64 * 1024]; - for (id, pkg) in ids.iter() { - // Next up, copy it to the vendor directory - let src = pkg - .manifest_path() - .parent() - .expect("manifest_path should point to a file"); - let max_version = *versions[&id.name()].iter().rev().next().unwrap().0; - let dir_has_version_suffix = opts.versioned_dirs || id.version() != max_version; - let dst_name = if dir_has_version_suffix { - // Eg vendor/futures-0.1.13 - format!("{}-{}", id.name(), id.version()) - } else { - // Eg vendor/futures - id.name().to_string() - }; - - sources.insert(id.source_id()); - let dst = canonical_destination.join(&dst_name); - to_remove.remove(&dst); - let cksum = dst.join(".cargo-checksum.json"); - if dir_has_version_suffix && cksum.exists() { - // Always re-copy directory without version suffix in case the version changed - continue; - } - - config.shell().status( - "Vendoring", - &format!("{} ({}) to {}", id, src.to_string_lossy(), dst.display()), - )?; - - let _ = fs::remove_dir_all(&dst); - let pathsource = PathSource::new(src, id.source_id(), config); - let paths = pathsource.list_files(pkg)?; - let mut map = BTreeMap::new(); - cp_sources(src, &paths, &dst, &mut map, &mut tmp_buf) - .with_context(|| format!("failed to copy over vendored sources for: {}", id))?; - - // Finally, emit the metadata about this package - let json = serde_json::json!({ - "package": checksums.get(id), - "files": map, - }); - - paths::write(&cksum, json.to_string())?; - } - - for path in to_remove { - if path.is_dir() { - paths::remove_dir_all(&path)?; - } else { - paths::remove_file(&path)?; - } - } - - // add our vendored source - let mut config = BTreeMap::new(); - - let merged_source_name = "vendored-sources"; - - // replace original sources with vendor - for source_id in sources { - let name = if source_id.is_default_registry() { - CRATES_IO_REGISTRY.to_string() - } else { - source_id.url().to_string() - }; - - let source = if source_id.is_default_registry() { - VendorSource::Registry { - registry: None, - replace_with: merged_source_name.to_string(), - } - } else if source_id.is_remote_registry() { - let registry = source_id.url().to_string(); - VendorSource::Registry { - registry: Some(registry), - replace_with: merged_source_name.to_string(), - } - } else if source_id.is_git() { - let mut branch = None; - let mut tag = None; - let mut rev = None; - if let Some(reference) = source_id.git_reference() { - match *reference { - GitReference::Branch(ref b) => branch = Some(b.clone()), - GitReference::Tag(ref t) => tag = Some(t.clone()), - GitReference::Rev(ref r) => rev = Some(r.clone()), - GitReference::DefaultBranch => {} - } - } - VendorSource::Git { - git: source_id.url().to_string(), - branch, - tag, - rev, - replace_with: merged_source_name.to_string(), - } - } else { - panic!("Invalid source ID: {}", source_id) - }; - config.insert(name, source); - } - - if !config.is_empty() { - config.insert( - merged_source_name.to_string(), - 
VendorSource::Directory { - directory: opts.destination.to_path_buf(), - }, - ); - } else if !dest_dir_already_exists { - // Nothing to vendor. Remove the destination dir we've just created. - paths::remove_dir(canonical_destination)?; - } - - Ok(VendorConfig { source: config }) -} - -fn cp_sources( - src: &Path, - paths: &[PathBuf], - dst: &Path, - cksums: &mut BTreeMap, - tmp_buf: &mut [u8], -) -> CargoResult<()> { - for p in paths { - let relative = p.strip_prefix(&src).unwrap(); - - match relative.to_str() { - // Skip git config files as they're not relevant to builds most of - // the time and if we respect them (e.g. in git) then it'll - // probably mess with the checksums when a vendor dir is checked - // into someone else's source control - Some(".gitattributes") | Some(".gitignore") | Some(".git") => continue, - - // Temporary Cargo files - Some(".cargo-ok") => continue, - - // Skip patch-style orig/rej files. Published crates on crates.io - // have `Cargo.toml.orig` which we don't want to use here and - // otherwise these are rarely used as part of the build process. - Some(filename) => { - if filename.ends_with(".orig") || filename.ends_with(".rej") { - continue; - } - } - _ => {} - }; - - // Join pathname components individually to make sure that the joined - // path uses the correct directory separators everywhere, since - // `relative` may use Unix-style and `dst` may require Windows-style - // backslashes. - let dst = relative - .iter() - .fold(dst.to_owned(), |acc, component| acc.join(&component)); - - paths::create_dir_all(dst.parent().unwrap())?; - - let cksum = copy_and_checksum(p, &dst, tmp_buf)?; - cksums.insert(relative.to_str().unwrap().replace("\\", "/"), cksum); - } - Ok(()) -} - -fn copy_and_checksum(src_path: &Path, dst_path: &Path, buf: &mut [u8]) -> CargoResult { - let mut src = File::open(src_path).with_context(|| format!("failed to open {:?}", src_path))?; - let mut dst_opts = OpenOptions::new(); - dst_opts.write(true).create(true).truncate(true); - #[cfg(unix)] - { - use std::os::unix::fs::{MetadataExt, OpenOptionsExt}; - let src_metadata = src - .metadata() - .with_context(|| format!("failed to stat {:?}", src_path))?; - dst_opts.mode(src_metadata.mode()); - } - let mut dst = dst_opts - .open(dst_path) - .with_context(|| format!("failed to create {:?}", dst_path))?; - // Not going to bother setting mode on pre-existing files, since there - // shouldn't be any under normal conditions. - let mut cksum = Sha256::new(); - loop { - let n = src - .read(buf) - .with_context(|| format!("failed to read from {:?}", src_path))?; - if n == 0 { - break Ok(cksum.finish_hex()); - } - let data = &buf[..n]; - cksum.update(data); - dst.write_all(data) - .with_context(|| format!("failed to write to {:?}", dst_path))?; - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/config.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/config.rs deleted file mode 100644 index 0e2b24efb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/config.rs +++ /dev/null @@ -1,294 +0,0 @@ -//! Implementation of configuration for various sources -//! -//! This module will parse the various `source.*` TOML configuration keys into a -//! structure usable by Cargo itself. Currently this is primarily used to map -//! sources to one another via the `replace-with` key in `.cargo/config`. 
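
To make that mapping concrete, here is a minimal, self-contained sketch of the kind of `source.*` table this module consumes, assuming the `serde` and `toml` crates are available; `SourceDef` is a simplified stand-in for the `SourceConfigDef` type defined below, trimmed to the fields needed for the example.

```rust
use std::collections::HashMap;
use serde::Deserialize;

/// Simplified stand-in for a `[source.<name>]` table entry: just enough
/// fields to show how `replace-with` points one named source at another.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct SourceDef {
    replace_with: Option<String>,
    registry: Option<String>,
    directory: Option<String>,
}

#[derive(Debug, Deserialize)]
struct Root {
    source: HashMap<String, SourceDef>,
}

fn main() {
    // crates.io is redirected to a directory source named "vendored-sources".
    let cfg = r#"
        [source.crates-io]
        replace-with = "vendored-sources"

        [source.vendored-sources]
        directory = "vendor"
    "#;
    let root: Root = toml::from_str(cfg).unwrap();
    for (name, def) in &root.source {
        println!("{}: {:?}", name, def);
    }
}
```

This matches the snippet that the `vendor` implementation above prints for users to paste into `.cargo/config.toml`.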
- -use crate::core::{GitReference, PackageId, Source, SourceId}; -use crate::sources::{ReplacedSource, CRATES_IO_REGISTRY}; -use crate::util::config::{self, ConfigRelativePath, OptValue}; -use crate::util::errors::CargoResult; -use crate::util::{Config, IntoUrl}; -use anyhow::{bail, Context as _}; -use log::debug; -use std::collections::{HashMap, HashSet}; -use url::Url; - -#[derive(Clone)] -pub struct SourceConfigMap<'cfg> { - /// Mapping of source name to the toml configuration. - cfgs: HashMap, - /// Mapping of `SourceId` to the source name. - id2name: HashMap, - config: &'cfg Config, -} - -/// Definition of a source in a config file. -#[derive(Debug, serde::Deserialize)] -#[serde(rename_all = "kebab-case")] -struct SourceConfigDef { - /// Indicates this source should be replaced with another of the given name. - replace_with: OptValue, - /// A directory source. - directory: Option, - /// A registry source. Value is a URL. - registry: OptValue, - /// A local registry source. - local_registry: Option, - /// A git source. Value is a URL. - git: OptValue, - /// The git branch. - branch: OptValue, - /// The git tag. - tag: OptValue, - /// The git revision. - rev: OptValue, -} - -/// Configuration for a particular source, found in TOML looking like: -/// -/// ```toml -/// [source.crates-io] -/// registry = 'https://github.com/rust-lang/crates.io-index' -/// replace-with = 'foo' # optional -/// ``` -#[derive(Clone)] -struct SourceConfig { - /// `SourceId` this source corresponds to, inferred from the various - /// defined keys in the configuration. - id: SourceId, - - /// Whether or not this source is replaced with another. - /// - /// This field is a tuple of `(name, location)` where `location` is where - /// this configuration key was defined (such as the `.cargo/config` path - /// or the environment variable name). - replace_with: Option<(String, String)>, -} - -impl<'cfg> SourceConfigMap<'cfg> { - pub fn new(config: &'cfg Config) -> CargoResult> { - let mut base = SourceConfigMap::empty(config)?; - let sources: Option> = config.get("source")?; - if let Some(sources) = sources { - for (key, value) in sources.into_iter() { - base.add_config(key, value)?; - } - } - Ok(base) - } - - pub fn empty(config: &'cfg Config) -> CargoResult> { - let mut base = SourceConfigMap { - cfgs: HashMap::new(), - id2name: HashMap::new(), - config, - }; - base.add( - CRATES_IO_REGISTRY, - SourceConfig { - id: SourceId::crates_io(config)?, - replace_with: None, - }, - )?; - Ok(base) - } - - pub fn config(&self) -> &'cfg Config { - self.config - } - - /// Get the `Source` for a given `SourceId`. 
- pub fn load( - &self, - id: SourceId, - yanked_whitelist: &HashSet, - ) -> CargoResult> { - debug!("loading: {}", id); - - let mut name = match self.id2name.get(&id) { - Some(name) => name, - None => return id.load(self.config, yanked_whitelist), - }; - let mut cfg_loc = ""; - let orig_name = name; - let new_id; - loop { - let cfg = match self.cfgs.get(name) { - Some(cfg) => cfg, - None => bail!( - "could not find a configured source with the \ - name `{}` when attempting to lookup `{}` \ - (configuration in `{}`)", - name, - orig_name, - cfg_loc - ), - }; - match &cfg.replace_with { - Some((s, c)) => { - name = s; - cfg_loc = c; - } - None if id == cfg.id => return id.load(self.config, yanked_whitelist), - None => { - new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string())); - break; - } - } - debug!("following pointer to {}", name); - if name == orig_name { - bail!( - "detected a cycle of `replace-with` sources, the source \ - `{}` is eventually replaced with itself \ - (configuration in `{}`)", - name, - cfg_loc - ) - } - } - - let new_src = new_id.load( - self.config, - &yanked_whitelist - .iter() - .map(|p| p.map_source(id, new_id)) - .collect(), - )?; - let old_src = id.load(self.config, yanked_whitelist)?; - if !new_src.supports_checksums() && old_src.supports_checksums() { - bail!( - "\ -cannot replace `{orig}` with `{name}`, the source `{orig}` supports \ -checksums, but `{name}` does not - -a lock file compatible with `{orig}` cannot be generated in this situation -", - orig = orig_name, - name = name - ); - } - - if old_src.requires_precise() && id.precise().is_none() { - bail!( - "\ -the source {orig} requires a lock file to be present first before it can be -used against vendored source code - -remove the source replacement configuration, generate a lock file, and then -restore the source replacement configuration to continue the build -", - orig = orig_name - ); - } - - Ok(Box::new(ReplacedSource::new(id, new_id, new_src))) - } - - fn add(&mut self, name: &str, cfg: SourceConfig) -> CargoResult<()> { - if let Some(old_name) = self.id2name.insert(cfg.id, name.to_string()) { - // The user is allowed to redefine the built-in crates-io - // definition from `empty()`. 
- if name != CRATES_IO_REGISTRY { - bail!( - "source `{}` defines source {}, but that source is already defined by `{}`\n\ - note: Sources are not allowed to be defined multiple times.", - name, - cfg.id, - old_name - ); - } - } - self.cfgs.insert(name.to_string(), cfg); - Ok(()) - } - - fn add_config(&mut self, name: String, def: SourceConfigDef) -> CargoResult<()> { - let mut srcs = Vec::new(); - if let Some(registry) = def.registry { - let url = url(®istry, &format!("source.{}.registry", name))?; - srcs.push(SourceId::for_alt_registry(&url, &name)?); - } - if let Some(local_registry) = def.local_registry { - let path = local_registry.resolve_path(self.config); - srcs.push(SourceId::for_local_registry(&path)?); - } - if let Some(directory) = def.directory { - let path = directory.resolve_path(self.config); - srcs.push(SourceId::for_directory(&path)?); - } - if let Some(git) = def.git { - let url = url(&git, &format!("source.{}.git", name))?; - let reference = match def.branch { - Some(b) => GitReference::Branch(b.val), - None => match def.tag { - Some(b) => GitReference::Tag(b.val), - None => match def.rev { - Some(b) => GitReference::Rev(b.val), - None => GitReference::DefaultBranch, - }, - }, - }; - srcs.push(SourceId::for_git(&url, reference)?); - } else { - let check_not_set = |key, v: OptValue| { - if let Some(val) = v { - bail!( - "source definition `source.{}` specifies `{}`, \ - but that requires a `git` key to be specified (in {})", - name, - key, - val.definition - ); - } - Ok(()) - }; - check_not_set("branch", def.branch)?; - check_not_set("tag", def.tag)?; - check_not_set("rev", def.rev)?; - } - if name == CRATES_IO_REGISTRY && srcs.is_empty() { - srcs.push(SourceId::crates_io(self.config)?); - } - - match srcs.len() { - 0 => bail!( - "no source location specified for `source.{}`, need \ - `registry`, `local-registry`, `directory`, or `git` defined", - name - ), - 1 => {} - _ => bail!( - "more than one source location specified for `source.{}`", - name - ), - } - let src = srcs[0]; - - let replace_with = def - .replace_with - .map(|val| (val.val, val.definition.to_string())); - - self.add( - &name, - SourceConfig { - id: src, - replace_with, - }, - )?; - - return Ok(()); - - fn url(val: &config::Value, key: &str) -> CargoResult { - let url = val.val.into_url().with_context(|| { - format!( - "configuration key `{}` specified an invalid \ - URL (in {})", - key, val.definition - ) - })?; - - Ok(url) - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/directory.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/directory.rs deleted file mode 100644 index 7a00b560f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/directory.rs +++ /dev/null @@ -1,208 +0,0 @@ -use std::collections::HashMap; -use std::fmt::{self, Debug, Formatter}; -use std::path::{Path, PathBuf}; - -use crate::core::source::MaybePackage; -use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; -use crate::sources::PathSource; -use crate::util::errors::CargoResult; -use crate::util::Config; - -use anyhow::Context as _; -use cargo_util::{paths, Sha256}; -use serde::Deserialize; - -pub struct DirectorySource<'cfg> { - source_id: SourceId, - root: PathBuf, - packages: HashMap, - config: &'cfg Config, -} - -#[derive(Deserialize)] -struct Checksum { - package: Option, - files: HashMap, -} - -impl<'cfg> DirectorySource<'cfg> { - pub fn new(path: &Path, id: SourceId, config: &'cfg Config) -> DirectorySource<'cfg> { - 
DirectorySource { - source_id: id, - root: path.to_path_buf(), - config, - packages: HashMap::new(), - } - } -} - -impl<'cfg> Debug for DirectorySource<'cfg> { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - write!(f, "DirectorySource {{ root: {:?} }}", self.root) - } -} - -impl<'cfg> Source for DirectorySource<'cfg> { - fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - let packages = self.packages.values().map(|p| &p.0); - let matches = packages.filter(|pkg| dep.matches(pkg.summary())); - for summary in matches.map(|pkg| pkg.summary().clone()) { - f(summary); - } - Ok(()) - } - - fn fuzzy_query(&mut self, _dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - let packages = self.packages.values().map(|p| &p.0); - for summary in packages.map(|pkg| pkg.summary().clone()) { - f(summary); - } - Ok(()) - } - - fn supports_checksums(&self) -> bool { - true - } - - fn requires_precise(&self) -> bool { - true - } - - fn source_id(&self) -> SourceId { - self.source_id - } - - fn update(&mut self) -> CargoResult<()> { - self.packages.clear(); - let entries = self.root.read_dir().with_context(|| { - format!( - "failed to read root of directory source: {}", - self.root.display() - ) - })?; - - for entry in entries { - let entry = entry?; - let path = entry.path(); - - // Ignore hidden/dot directories as they typically don't contain - // crates and otherwise may conflict with a VCS - // (rust-lang/cargo#3414). - if let Some(s) = path.file_name().and_then(|s| s.to_str()) { - if s.starts_with('.') { - continue; - } - } - - // Vendor directories are often checked into a VCS, but throughout - // the lifetime of a vendor dir crates are often added and deleted. - // Some VCS implementations don't always fully delete the directory - // when a dir is removed from a different checkout. Sometimes a - // mostly-empty dir is left behind. - // - // Additionally vendor directories are sometimes accompanied with - // readme files and other auxiliary information not too interesting - // to Cargo. - // - // To help handle all this we only try processing folders with a - // `Cargo.toml` in them. This has the upside of being pretty - // flexible with the contents of vendor directories but has the - // downside of accidentally misconfigured vendor directories - // silently returning less crates. 
- if !path.join("Cargo.toml").exists() { - continue; - } - - let mut src = PathSource::new(&path, self.source_id, self.config); - src.update()?; - let mut pkg = src.root_package()?; - - let cksum_file = path.join(".cargo-checksum.json"); - let cksum = paths::read(&path.join(cksum_file)).with_context(|| { - format!( - "failed to load checksum `.cargo-checksum.json` \ - of {} v{}", - pkg.package_id().name(), - pkg.package_id().version() - ) - })?; - let cksum: Checksum = serde_json::from_str(&cksum).with_context(|| { - format!( - "failed to decode `.cargo-checksum.json` of \ - {} v{}", - pkg.package_id().name(), - pkg.package_id().version() - ) - })?; - - if let Some(package) = &cksum.package { - pkg.manifest_mut() - .summary_mut() - .set_checksum(package.clone()); - } - self.packages.insert(pkg.package_id(), (pkg, cksum)); - } - - Ok(()) - } - - fn download(&mut self, id: PackageId) -> CargoResult { - self.packages - .get(&id) - .map(|p| &p.0) - .cloned() - .map(MaybePackage::Ready) - .ok_or_else(|| anyhow::format_err!("failed to find package with id: {}", id)) - } - - fn finish_download(&mut self, _id: PackageId, _data: Vec) -> CargoResult { - panic!("no downloads to do") - } - - fn fingerprint(&self, pkg: &Package) -> CargoResult { - Ok(pkg.package_id().version().to_string()) - } - - fn verify(&self, id: PackageId) -> CargoResult<()> { - let (pkg, cksum) = match self.packages.get(&id) { - Some(&(ref pkg, ref cksum)) => (pkg, cksum), - None => anyhow::bail!("failed to find entry for `{}` in directory source", id), - }; - - for (file, cksum) in cksum.files.iter() { - let file = pkg.root().join(file); - let actual = Sha256::new() - .update_path(&file) - .with_context(|| format!("failed to calculate checksum of: {}", file.display()))? - .finish_hex(); - if &*actual != cksum { - anyhow::bail!( - "the listed checksum of `{}` has changed:\n\ - expected: {}\n\ - actual: {}\n\ - \n\ - directory sources are not intended to be edited, if \ - modifications are required then it is recommended \ - that `[patch]` is used with a forked copy of the \ - source\ - ", - file.display(), - cksum, - actual - ); - } - } - - Ok(()) - } - - fn describe(&self) -> String { - format!("directory source `{}`", self.root.display()) - } - - fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {} - - fn is_yanked(&mut self, _pkg: PackageId) -> CargoResult { - Ok(false) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/git/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/git/mod.rs deleted file mode 100644 index b32dbb17b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/git/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -pub use self::source::GitSource; -pub use self::utils::{fetch, GitCheckout, GitDatabase, GitRemote}; -mod source; -mod utils; diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/git/source.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/git/source.rs deleted file mode 100644 index 9d7c42b82..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/git/source.rs +++ /dev/null @@ -1,266 +0,0 @@ -use crate::core::source::{MaybePackage, Source, SourceId}; -use crate::core::GitReference; -use crate::core::{Dependency, Package, PackageId, Summary}; -use crate::sources::git::utils::GitRemote; -use crate::sources::PathSource; -use crate::util::errors::CargoResult; -use crate::util::hex::short_hash; -use crate::util::Config; -use anyhow::Context; -use log::trace; -use std::fmt::{self, 
Debug, Formatter}; -use url::Url; - -pub struct GitSource<'cfg> { - remote: GitRemote, - manifest_reference: GitReference, - locked_rev: Option, - source_id: SourceId, - path_source: Option>, - ident: String, - config: &'cfg Config, -} - -impl<'cfg> GitSource<'cfg> { - pub fn new(source_id: SourceId, config: &'cfg Config) -> CargoResult> { - assert!(source_id.is_git(), "id is not git, id={}", source_id); - - let remote = GitRemote::new(source_id.url()); - let ident = ident(&source_id); - - let source = GitSource { - remote, - manifest_reference: source_id.git_reference().unwrap().clone(), - locked_rev: match source_id.precise() { - Some(s) => Some(git2::Oid::from_str(s).with_context(|| { - format!("precise value for git is not a git revision: {}", s) - })?), - None => None, - }, - source_id, - path_source: None, - ident, - config, - }; - - Ok(source) - } - - pub fn url(&self) -> &Url { - self.remote.url() - } - - pub fn read_packages(&mut self) -> CargoResult> { - if self.path_source.is_none() { - self.update()?; - } - self.path_source.as_mut().unwrap().read_packages() - } -} - -fn ident(id: &SourceId) -> String { - let ident = id - .canonical_url() - .raw_canonicalized_url() - .path_segments() - .and_then(|s| s.rev().next()) - .unwrap_or(""); - - let ident = if ident.is_empty() { "_empty" } else { ident }; - - format!("{}-{}", ident, short_hash(id.canonical_url())) -} - -impl<'cfg> Debug for GitSource<'cfg> { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - write!(f, "git repo at {}", self.remote.url())?; - - match self.manifest_reference.pretty_ref() { - Some(s) => write!(f, " ({})", s), - None => Ok(()), - } - } -} - -impl<'cfg> Source for GitSource<'cfg> { - fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - let src = self - .path_source - .as_mut() - .expect("BUG: `update()` must be called before `query()`"); - src.query(dep, f) - } - - fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - let src = self - .path_source - .as_mut() - .expect("BUG: `update()` must be called before `query()`"); - src.fuzzy_query(dep, f) - } - - fn supports_checksums(&self) -> bool { - false - } - - fn requires_precise(&self) -> bool { - true - } - - fn source_id(&self) -> SourceId { - self.source_id - } - - fn update(&mut self) -> CargoResult<()> { - let git_path = self.config.git_path(); - let git_path = self.config.assert_package_cache_locked(&git_path); - let db_path = git_path.join("db").join(&self.ident); - - let db = self.remote.db_at(&db_path).ok(); - let (db, actual_rev) = match (self.locked_rev, db) { - // If we have a locked revision, and we have a preexisting database - // which has that revision, then no update needs to happen. - (Some(rev), Some(db)) if db.contains(rev) => (db, rev), - - // If we're in offline mode, we're not locked, and we have a - // database, then try to resolve our reference with the preexisting - // repository. - (None, Some(db)) if self.config.offline() => { - let rev = db.resolve(&self.manifest_reference).with_context(|| { - "failed to lookup reference in preexisting repository, and \ - can't check for updates in offline mode (--offline)" - })?; - (db, rev) - } - - // ... otherwise we use this state to update the git database. Note - // that we still check for being offline here, for example in the - // situation that we have a locked revision but the database - // doesn't have it. 
- (locked_rev, db) => { - if self.config.offline() { - anyhow::bail!( - "can't checkout from '{}': you are in the offline mode (--offline)", - self.remote.url() - ); - } - self.config.shell().status( - "Updating", - format!("git repository `{}`", self.remote.url()), - )?; - - trace!("updating git source `{:?}`", self.remote); - - self.remote.checkout( - &db_path, - db, - &self.manifest_reference, - locked_rev, - self.config, - )? - } - }; - - // Donโ€™t use the full hash, in order to contribute less to reaching the - // path length limit on Windows. See - // . - let short_id = db.to_short_id(actual_rev)?; - - // Check out `actual_rev` from the database to a scoped location on the - // filesystem. This will use hard links and such to ideally make the - // checkout operation here pretty fast. - let checkout_path = git_path - .join("checkouts") - .join(&self.ident) - .join(short_id.as_str()); - db.copy_to(actual_rev, &checkout_path, self.config)?; - - let source_id = self.source_id.with_precise(Some(actual_rev.to_string())); - let path_source = PathSource::new_recursive(&checkout_path, source_id, self.config); - - self.path_source = Some(path_source); - self.locked_rev = Some(actual_rev); - self.path_source.as_mut().unwrap().update() - } - - fn download(&mut self, id: PackageId) -> CargoResult { - trace!( - "getting packages for package ID `{}` from `{:?}`", - id, - self.remote - ); - self.path_source - .as_mut() - .expect("BUG: `update()` must be called before `get()`") - .download(id) - } - - fn finish_download(&mut self, _id: PackageId, _data: Vec) -> CargoResult { - panic!("no download should have started") - } - - fn fingerprint(&self, _pkg: &Package) -> CargoResult { - Ok(self.locked_rev.as_ref().unwrap().to_string()) - } - - fn describe(&self) -> String { - format!("Git repository {}", self.source_id) - } - - fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {} - - fn is_yanked(&mut self, _pkg: PackageId) -> CargoResult { - Ok(false) - } -} - -#[cfg(test)] -mod test { - use super::ident; - use crate::core::{GitReference, SourceId}; - use crate::util::IntoUrl; - - #[test] - pub fn test_url_to_path_ident_with_path() { - let ident = ident(&src("https://github.com/carlhuda/cargo")); - assert!(ident.starts_with("cargo-")); - } - - #[test] - pub fn test_url_to_path_ident_without_path() { - let ident = ident(&src("https://github.com")); - assert!(ident.starts_with("_empty-")); - } - - #[test] - fn test_canonicalize_idents_by_stripping_trailing_url_slash() { - let ident1 = ident(&src("https://github.com/PistonDevelopers/piston/")); - let ident2 = ident(&src("https://github.com/PistonDevelopers/piston")); - assert_eq!(ident1, ident2); - } - - #[test] - fn test_canonicalize_idents_by_lowercasing_github_urls() { - let ident1 = ident(&src("https://github.com/PistonDevelopers/piston")); - let ident2 = ident(&src("https://github.com/pistondevelopers/piston")); - assert_eq!(ident1, ident2); - } - - #[test] - fn test_canonicalize_idents_by_stripping_dot_git() { - let ident1 = ident(&src("https://github.com/PistonDevelopers/piston")); - let ident2 = ident(&src("https://github.com/PistonDevelopers/piston.git")); - assert_eq!(ident1, ident2); - } - - #[test] - fn test_canonicalize_idents_different_protocols() { - let ident1 = ident(&src("https://github.com/PistonDevelopers/piston")); - let ident2 = ident(&src("git://github.com/PistonDevelopers/piston")); - assert_eq!(ident1, ident2); - } - - fn src(s: &str) -> SourceId { - SourceId::for_git(&s.into_url().unwrap(), 
GitReference::DefaultBranch).unwrap() - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/git/utils.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/git/utils.rs deleted file mode 100644 index 8f73a1a12..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/git/utils.rs +++ /dev/null @@ -1,1079 +0,0 @@ -//! Utilities for handling git repositories, mainly around -//! authentication/cloning. - -use crate::core::GitReference; -use crate::util::errors::CargoResult; -use crate::util::{network, Config, IntoUrl, MetricsCounter, Progress}; -use anyhow::{anyhow, Context as _}; -use cargo_util::{paths, ProcessBuilder}; -use curl::easy::List; -use git2::{self, ErrorClass, ObjectType}; -use log::{debug, info}; -use serde::ser; -use serde::Serialize; -use std::env; -use std::fmt; -use std::path::{Path, PathBuf}; -use std::process::Command; -use std::time::{Duration, Instant}; -use url::Url; - -fn serialize_str(t: &T, s: S) -> Result -where - T: fmt::Display, - S: ser::Serializer, -{ - s.collect_str(t) -} - -pub struct GitShortID(git2::Buf); - -impl GitShortID { - pub fn as_str(&self) -> &str { - self.0.as_str().unwrap() - } -} - -/// `GitRemote` represents a remote repository. It gets cloned into a local -/// `GitDatabase`. -#[derive(PartialEq, Clone, Debug, Serialize)] -pub struct GitRemote { - #[serde(serialize_with = "serialize_str")] - url: Url, -} - -/// `GitDatabase` is a local clone of a remote repository's database. Multiple -/// `GitCheckouts` can be cloned from this `GitDatabase`. -#[derive(Serialize)] -pub struct GitDatabase { - remote: GitRemote, - path: PathBuf, - #[serde(skip_serializing)] - repo: git2::Repository, -} - -/// `GitCheckout` is a local checkout of a particular revision. Calling -/// `clone_into` with a reference will resolve the reference into a revision, -/// and return an `anyhow::Error` if no revision for that reference was found. -#[derive(Serialize)] -pub struct GitCheckout<'a> { - database: &'a GitDatabase, - location: PathBuf, - #[serde(serialize_with = "serialize_str")] - revision: git2::Oid, - #[serde(skip_serializing)] - repo: git2::Repository, -} - -// Implementations - -impl GitRemote { - pub fn new(url: &Url) -> GitRemote { - GitRemote { url: url.clone() } - } - - pub fn url(&self) -> &Url { - &self.url - } - - pub fn rev_for(&self, path: &Path, reference: &GitReference) -> CargoResult { - reference.resolve(&self.db_at(path)?.repo) - } - - pub fn checkout( - &self, - into: &Path, - db: Option, - reference: &GitReference, - locked_rev: Option, - cargo_config: &Config, - ) -> CargoResult<(GitDatabase, git2::Oid)> { - // If we have a previous instance of `GitDatabase` then fetch into that - // if we can. If that can successfully load our revision then we've - // populated the database with the latest version of `reference`, so - // return that database and the rev we resolve to. - if let Some(mut db) = db { - fetch(&mut db.repo, self.url.as_str(), reference, cargo_config) - .context(format!("failed to fetch into: {}", into.display()))?; - match locked_rev { - Some(rev) => { - if db.contains(rev) { - return Ok((db, rev)); - } - } - None => { - if let Ok(rev) = reference.resolve(&db.repo) { - return Ok((db, rev)); - } - } - } - } - - // Otherwise start from scratch to handle corrupt git repositories. - // After our fetch (which is interpreted as a clone now) we do the same - // resolution to figure out what we cloned. 
- if into.exists() { - paths::remove_dir_all(into)?; - } - paths::create_dir_all(into)?; - let mut repo = init(into, true)?; - fetch(&mut repo, self.url.as_str(), reference, cargo_config) - .context(format!("failed to clone into: {}", into.display()))?; - let rev = match locked_rev { - Some(rev) => rev, - None => reference.resolve(&repo)?, - }; - - Ok(( - GitDatabase { - remote: self.clone(), - path: into.to_path_buf(), - repo, - }, - rev, - )) - } - - pub fn db_at(&self, db_path: &Path) -> CargoResult { - let repo = git2::Repository::open(db_path)?; - Ok(GitDatabase { - remote: self.clone(), - path: db_path.to_path_buf(), - repo, - }) - } -} - -impl GitDatabase { - pub fn copy_to( - &self, - rev: git2::Oid, - dest: &Path, - cargo_config: &Config, - ) -> CargoResult> { - let mut checkout = None; - if let Ok(repo) = git2::Repository::open(dest) { - let mut co = GitCheckout::new(dest, self, rev, repo); - if !co.is_fresh() { - // After a successful fetch operation the subsequent reset can - // fail sometimes for corrupt repositories where the fetch - // operation succeeds but the object isn't actually there in one - // way or another. In these situations just skip the error and - // try blowing away the whole repository and trying with a - // clone. - co.fetch(cargo_config)?; - match co.reset(cargo_config) { - Ok(()) => { - assert!(co.is_fresh()); - checkout = Some(co); - } - Err(e) => debug!("failed reset after fetch {:?}", e), - } - } else { - checkout = Some(co); - } - }; - let checkout = match checkout { - Some(c) => c, - None => GitCheckout::clone_into(dest, self, rev, cargo_config)?, - }; - checkout.update_submodules(cargo_config)?; - Ok(checkout) - } - - pub fn to_short_id(&self, revision: git2::Oid) -> CargoResult { - let obj = self.repo.find_object(revision, None)?; - Ok(GitShortID(obj.short_id()?)) - } - - pub fn contains(&self, oid: git2::Oid) -> bool { - self.repo.revparse_single(&oid.to_string()).is_ok() - } - - pub fn resolve(&self, r: &GitReference) -> CargoResult { - r.resolve(&self.repo) - } -} - -impl GitReference { - pub fn resolve(&self, repo: &git2::Repository) -> CargoResult { - let id = match self { - // Note that we resolve the named tag here in sync with where it's - // fetched into via `fetch` below. - GitReference::Tag(s) => (|| -> CargoResult { - let refname = format!("refs/remotes/origin/tags/{}", s); - let id = repo.refname_to_id(&refname)?; - let obj = repo.find_object(id, None)?; - let obj = obj.peel(ObjectType::Commit)?; - Ok(obj.id()) - })() - .with_context(|| format!("failed to find tag `{}`", s))?, - - // Resolve the remote name since that's all we're configuring in - // `fetch` below. - GitReference::Branch(s) => { - let name = format!("origin/{}", s); - let b = repo - .find_branch(&name, git2::BranchType::Remote) - .with_context(|| format!("failed to find branch `{}`", s))?; - b.get() - .target() - .ok_or_else(|| anyhow::format_err!("branch `{}` did not have a target", s))? 
- } - - // We'll be using the HEAD commit - GitReference::DefaultBranch => { - let head_id = repo.refname_to_id("refs/remotes/origin/HEAD")?; - let head = repo.find_object(head_id, None)?; - head.peel(ObjectType::Commit)?.id() - } - - GitReference::Rev(s) => { - let obj = repo.revparse_single(s)?; - match obj.as_tag() { - Some(tag) => tag.target_id(), - None => obj.id(), - } - } - }; - Ok(id) - } -} - -impl<'a> GitCheckout<'a> { - fn new( - path: &Path, - database: &'a GitDatabase, - revision: git2::Oid, - repo: git2::Repository, - ) -> GitCheckout<'a> { - GitCheckout { - location: path.to_path_buf(), - database, - revision, - repo, - } - } - - fn clone_into( - into: &Path, - database: &'a GitDatabase, - revision: git2::Oid, - config: &Config, - ) -> CargoResult> { - let dirname = into.parent().unwrap(); - paths::create_dir_all(&dirname)?; - if into.exists() { - paths::remove_dir_all(into)?; - } - - // we're doing a local filesystem-to-filesystem clone so there should - // be no need to respect global configuration options, so pass in - // an empty instance of `git2::Config` below. - let git_config = git2::Config::new()?; - - // Clone the repository, but make sure we use the "local" option in - // libgit2 which will attempt to use hardlinks to set up the database. - // This should speed up the clone operation quite a bit if it works. - // - // Note that we still use the same fetch options because while we don't - // need authentication information we may want progress bars and such. - let url = database.path.into_url()?; - let mut repo = None; - with_fetch_options(&git_config, url.as_str(), config, &mut |fopts| { - let mut checkout = git2::build::CheckoutBuilder::new(); - checkout.dry_run(); // we'll do this below during a `reset` - - let r = git2::build::RepoBuilder::new() - // use hard links and/or copy the database, we're doing a - // filesystem clone so this'll speed things up quite a bit. - .clone_local(git2::build::CloneLocal::Local) - .with_checkout(checkout) - .fetch_options(fopts) - .clone(url.as_str(), into)?; - repo = Some(r); - Ok(()) - })?; - let repo = repo.unwrap(); - - let checkout = GitCheckout::new(into, database, revision, repo); - checkout.reset(config)?; - Ok(checkout) - } - - fn is_fresh(&self) -> bool { - match self.repo.revparse_single("HEAD") { - Ok(ref head) if head.id() == self.revision => { - // See comments in reset() for why we check this - self.location.join(".cargo-ok").exists() - } - _ => false, - } - } - - fn fetch(&mut self, cargo_config: &Config) -> CargoResult<()> { - info!("fetch {}", self.repo.path().display()); - let url = self.database.path.into_url()?; - let reference = GitReference::Rev(self.revision.to_string()); - fetch(&mut self.repo, url.as_str(), &reference, cargo_config)?; - Ok(()) - } - - fn reset(&self, config: &Config) -> CargoResult<()> { - // If we're interrupted while performing this reset (e.g., we die because - // of a signal) Cargo needs to be sure to try to check out this repo - // again on the next go-round. - // - // To enable this we have a dummy file in our checkout, .cargo-ok, which - // if present means that the repo has been successfully reset and is - // ready to go. Hence if we start to do a reset, we make sure this file - // *doesn't* exist, and then once we're done we create the file. - let ok_file = self.location.join(".cargo-ok"); - let _ = paths::remove_file(&ok_file); - info!("reset {} to {}", self.repo.path().display(), self.revision); - - // Ensure libgit2 won't mess with newlines when we vendor. 
- if let Ok(mut git_config) = self.repo.config() { - git_config.set_bool("core.autocrlf", false)?; - } - - let object = self.repo.find_object(self.revision, None)?; - reset(&self.repo, &object, config)?; - paths::create(ok_file)?; - Ok(()) - } - - fn update_submodules(&self, cargo_config: &Config) -> CargoResult<()> { - return update_submodules(&self.repo, cargo_config); - - fn update_submodules(repo: &git2::Repository, cargo_config: &Config) -> CargoResult<()> { - info!("update submodules for: {:?}", repo.workdir().unwrap()); - - for mut child in repo.submodules()? { - update_submodule(repo, &mut child, cargo_config).with_context(|| { - format!( - "failed to update submodule `{}`", - child.name().unwrap_or("") - ) - })?; - } - Ok(()) - } - - fn update_submodule( - parent: &git2::Repository, - child: &mut git2::Submodule<'_>, - cargo_config: &Config, - ) -> CargoResult<()> { - child.init(false)?; - let url = child.url().ok_or_else(|| { - anyhow::format_err!("non-utf8 url for submodule {:?}?", child.path()) - })?; - - // A submodule which is listed in .gitmodules but not actually - // checked out will not have a head id, so we should ignore it. - let head = match child.head_id() { - Some(head) => head, - None => return Ok(()), - }; - - // If the submodule hasn't been checked out yet, we need to - // clone it. If it has been checked out and the head is the same - // as the submodule's head, then we can skip an update and keep - // recursing. - let head_and_repo = child.open().and_then(|repo| { - let target = repo.head()?.target(); - Ok((target, repo)) - }); - let mut repo = match head_and_repo { - Ok((head, repo)) => { - if child.head_id() == head { - return update_submodules(&repo, cargo_config); - } - repo - } - Err(..) => { - let path = parent.workdir().unwrap().join(child.path()); - let _ = paths::remove_dir_all(&path); - init(&path, false)? - } - }; - // Fetch data from origin and reset to the head commit - let reference = GitReference::Rev(head.to_string()); - cargo_config - .shell() - .status("Updating", format!("git submodule `{}`", url))?; - fetch(&mut repo, url, &reference, cargo_config).with_context(|| { - format!( - "failed to fetch submodule `{}` from {}", - child.name().unwrap_or(""), - url - ) - })?; - - let obj = repo.find_object(head, None)?; - reset(&repo, &obj, cargo_config)?; - update_submodules(&repo, cargo_config) - } - } -} - -/// Prepare the authentication callbacks for cloning a git repository. -/// -/// The main purpose of this function is to construct the "authentication -/// callback" which is used to clone a repository. This callback will attempt to -/// find the right authentication on the system (without user input) and will -/// guide libgit2 in doing so. -/// -/// The callback is provided `allowed` types of credentials, and we try to do as -/// much as possible based on that: -/// -/// * Prioritize SSH keys from the local ssh agent as they're likely the most -/// reliable. The username here is prioritized from the credential -/// callback, then from whatever is configured in git itself, and finally -/// we fall back to the generic user of `git`. -/// -/// * If a username/password is allowed, then we fallback to git2-rs's -/// implementation of the credential helper. This is what is configured -/// with `credential.helper` in git, and is the interface for the macOS -/// keychain, for example. -/// -/// * After the above two have failed, we just kinda grapple attempting to -/// return *something*. 
-/// -/// If any form of authentication fails, libgit2 will repeatedly ask us for -/// credentials until we give it a reason to not do so. To ensure we don't -/// just sit here looping forever we keep track of authentications we've -/// attempted and we don't try the same ones again. -fn with_authentication(url: &str, cfg: &git2::Config, mut f: F) -> CargoResult -where - F: FnMut(&mut git2::Credentials<'_>) -> CargoResult, -{ - let mut cred_helper = git2::CredentialHelper::new(url); - cred_helper.config(cfg); - - let mut ssh_username_requested = false; - let mut cred_helper_bad = None; - let mut ssh_agent_attempts = Vec::new(); - let mut any_attempts = false; - let mut tried_sshkey = false; - let mut url_attempt = None; - - let orig_url = url; - let mut res = f(&mut |url, username, allowed| { - any_attempts = true; - if url != orig_url { - url_attempt = Some(url.to_string()); - } - // libgit2's "USERNAME" authentication actually means that it's just - // asking us for a username to keep going. This is currently only really - // used for SSH authentication and isn't really an authentication type. - // The logic currently looks like: - // - // let user = ...; - // if (user.is_null()) - // user = callback(USERNAME, null, ...); - // - // callback(SSH_KEY, user, ...) - // - // So if we're being called here then we know that (a) we're using ssh - // authentication and (b) no username was specified in the URL that - // we're trying to clone. We need to guess an appropriate username here, - // but that may involve a few attempts. Unfortunately we can't switch - // usernames during one authentication session with libgit2, so to - // handle this we bail out of this authentication session after setting - // the flag `ssh_username_requested`, and then we handle this below. - if allowed.contains(git2::CredentialType::USERNAME) { - debug_assert!(username.is_none()); - ssh_username_requested = true; - return Err(git2::Error::from_str("gonna try usernames later")); - } - - // An "SSH_KEY" authentication indicates that we need some sort of SSH - // authentication. This can currently either come from the ssh-agent - // process or from a raw in-memory SSH key. Cargo only supports using - // ssh-agent currently. - // - // If we get called with this then the only way that should be possible - // is if a username is specified in the URL itself (e.g., `username` is - // Some), hence the unwrap() here. We try custom usernames down below. - if allowed.contains(git2::CredentialType::SSH_KEY) && !tried_sshkey { - // If ssh-agent authentication fails, libgit2 will keep - // calling this callback asking for other authentication - // methods to try. Make sure we only try ssh-agent once, - // to avoid looping forever. - tried_sshkey = true; - let username = username.unwrap(); - debug_assert!(!ssh_username_requested); - ssh_agent_attempts.push(username.to_string()); - return git2::Cred::ssh_key_from_agent(username); - } - - // Sometimes libgit2 will ask for a username/password in plaintext. This - // is where Cargo would have an interactive prompt if we supported it, - // but we currently don't! Right now the only way we support fetching a - // plaintext password is through the `credential.helper` support, so - // fetch that here. - // - // If ssh-agent authentication fails, libgit2 will keep calling this - // callback asking for other authentication methods to try. Check - // cred_helper_bad to make sure we only try the git credentail helper - // once, to avoid looping forever. 
- if allowed.contains(git2::CredentialType::USER_PASS_PLAINTEXT) && cred_helper_bad.is_none() - { - let r = git2::Cred::credential_helper(cfg, url, username); - cred_helper_bad = Some(r.is_err()); - return r; - } - - // I'm... not sure what the DEFAULT kind of authentication is, but seems - // easy to support? - if allowed.contains(git2::CredentialType::DEFAULT) { - return git2::Cred::default(); - } - - // Whelp, we tried our best - Err(git2::Error::from_str("no authentication available")) - }); - - // Ok, so if it looks like we're going to be doing ssh authentication, we - // want to try a few different usernames as one wasn't specified in the URL - // for us to use. In order, we'll try: - // - // * A credential helper's username for this URL, if available. - // * This account's username. - // * "git" - // - // We have to restart the authentication session each time (due to - // constraints in libssh2 I guess? maybe this is inherent to ssh?), so we - // call our callback, `f`, in a loop here. - if ssh_username_requested { - debug_assert!(res.is_err()); - let mut attempts = vec![String::from("git")]; - if let Ok(s) = env::var("USER").or_else(|_| env::var("USERNAME")) { - attempts.push(s); - } - if let Some(ref s) = cred_helper.username { - attempts.push(s.clone()); - } - - while let Some(s) = attempts.pop() { - // We should get `USERNAME` first, where we just return our attempt, - // and then after that we should get `SSH_KEY`. If the first attempt - // fails we'll get called again, but we don't have another option so - // we bail out. - let mut attempts = 0; - res = f(&mut |_url, username, allowed| { - if allowed.contains(git2::CredentialType::USERNAME) { - return git2::Cred::username(&s); - } - if allowed.contains(git2::CredentialType::SSH_KEY) { - debug_assert_eq!(Some(&s[..]), username); - attempts += 1; - if attempts == 1 { - ssh_agent_attempts.push(s.to_string()); - return git2::Cred::ssh_key_from_agent(&s); - } - } - Err(git2::Error::from_str("no authentication available")) - }); - - // If we made two attempts then that means: - // - // 1. A username was requested, we returned `s`. - // 2. An ssh key was requested, we returned to look up `s` in the - // ssh agent. - // 3. For whatever reason that lookup failed, so we were asked again - // for another mode of authentication. - // - // Essentially, if `attempts == 2` then in theory the only error was - // that this username failed to authenticate (e.g., no other network - // errors happened). Otherwise something else is funny so we bail - // out. - if attempts != 2 { - break; - } - } - } - let mut err = match res { - Ok(e) => return Ok(e), - Err(e) => e, - }; - - // In the case of an authentication failure (where we tried something) then - // we try to give a more helpful error message about precisely what we - // tried. 
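// A minimal sketch of the username-guessing order described above, assuming a
// hypothetical `cred_helper_username` lookup result; it only shows how the
// list of candidates is assembled, not the libgit2 retry loop itself.
use std::env;

fn ssh_username_candidates(cred_helper_username: Option<String>) -> Vec<String> {
    // Candidates are pushed in reverse priority and popped from the end, so
    // the credential helper's username is tried first and "git" last.
    let mut attempts = vec![String::from("git")];
    if let Ok(user) = env::var("USER").or_else(|_| env::var("USERNAME")) {
        attempts.push(user);
    }
    if let Some(name) = cred_helper_username {
        attempts.push(name);
    }
    attempts
}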
- if any_attempts { - let mut msg = "failed to authenticate when downloading \ - repository" - .to_string(); - - if let Some(attempt) = &url_attempt { - if url != attempt { - msg.push_str(": "); - msg.push_str(attempt); - } - } - msg.push('\n'); - if !ssh_agent_attempts.is_empty() { - let names = ssh_agent_attempts - .iter() - .map(|s| format!("`{}`", s)) - .collect::>() - .join(", "); - msg.push_str(&format!( - "\n* attempted ssh-agent authentication, but \ - no usernames succeeded: {}", - names - )); - } - if let Some(failed_cred_helper) = cred_helper_bad { - if failed_cred_helper { - msg.push_str( - "\n* attempted to find username/password via \ - git's `credential.helper` support, but failed", - ); - } else { - msg.push_str( - "\n* attempted to find username/password via \ - `credential.helper`, but maybe the found \ - credentials were incorrect", - ); - } - } - msg.push_str("\n\n"); - msg.push_str("if the git CLI succeeds then `net.git-fetch-with-cli` may help here\n"); - msg.push_str("https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli"); - err = err.context(msg); - - // Otherwise if we didn't even get to the authentication phase them we may - // have failed to set up a connection, in these cases hint on the - // `net.git-fetch-with-cli` configuration option. - } else if let Some(e) = err.downcast_ref::() { - match e.class() { - ErrorClass::Net - | ErrorClass::Ssl - | ErrorClass::Submodule - | ErrorClass::FetchHead - | ErrorClass::Ssh - | ErrorClass::Callback - | ErrorClass::Http => { - let mut msg = "network failure seems to have happened\n".to_string(); - msg.push_str( - "if a proxy or similar is necessary `net.git-fetch-with-cli` may help here\n", - ); - msg.push_str( - "https://doc.rust-lang.org/cargo/reference/config.html#netgit-fetch-with-cli", - ); - err = err.context(msg); - } - _ => {} - } - } - - Err(err) -} - -fn reset(repo: &git2::Repository, obj: &git2::Object<'_>, config: &Config) -> CargoResult<()> { - let mut pb = Progress::new("Checkout", config); - let mut opts = git2::build::CheckoutBuilder::new(); - opts.progress(|_, cur, max| { - drop(pb.tick(cur, max, "")); - }); - debug!("doing reset"); - repo.reset(obj, git2::ResetType::Hard, Some(&mut opts))?; - debug!("reset done"); - Ok(()) -} - -pub fn with_fetch_options( - git_config: &git2::Config, - url: &str, - config: &Config, - cb: &mut dyn FnMut(git2::FetchOptions<'_>) -> CargoResult<()>, -) -> CargoResult<()> { - let mut progress = Progress::new("Fetch", config); - network::with_retry(config, || { - with_authentication(url, git_config, |f| { - let mut last_update = Instant::now(); - let mut rcb = git2::RemoteCallbacks::new(); - // We choose `N=10` here to make a `300ms * 10slots ~= 3000ms` - // sliding window for tracking the data transfer rate (in bytes/s). - let mut counter = MetricsCounter::<10>::new(0, last_update); - rcb.credentials(f); - rcb.transfer_progress(|stats| { - let indexed_deltas = stats.indexed_deltas(); - let msg = if indexed_deltas > 0 { - // Resolving deltas. - format!( - ", ({}/{}) resolving deltas", - indexed_deltas, - stats.total_deltas() - ) - } else { - // Receiving objects. - // - // # Caveat - // - // Progress bar relies on git2 calling `transfer_progress` - // to update its transfer rate, but we cannot guarantee a - // periodic call of that callback. Thus if we don't receive - // any data for, say, 10 seconds, the rate will get stuck - // and never go down to 0B/s. 
- // In the future, we need to find away to update the rate - // even when the callback is not called. - let now = Instant::now(); - // Scrape a `received_bytes` to the counter every 300ms. - if now - last_update > Duration::from_millis(300) { - counter.add(stats.received_bytes(), now); - last_update = now; - } - fn format_bytes(bytes: f32) -> (&'static str, f32) { - static UNITS: [&str; 5] = ["", "Ki", "Mi", "Gi", "Ti"]; - let i = (bytes.log2() / 10.0).min(4.0) as usize; - (UNITS[i], bytes / 1024_f32.powi(i as i32)) - } - let (unit, rate) = format_bytes(counter.rate()); - format!(", {:.2}{}B/s", rate, unit) - }; - progress - .tick(stats.indexed_objects(), stats.total_objects(), &msg) - .is_ok() - }); - - // Create a local anonymous remote in the repository to fetch the - // url - let mut opts = git2::FetchOptions::new(); - opts.remote_callbacks(rcb); - cb(opts) - })?; - Ok(()) - }) -} - -pub fn fetch( - repo: &mut git2::Repository, - url: &str, - reference: &GitReference, - config: &Config, -) -> CargoResult<()> { - if config.frozen() { - anyhow::bail!( - "attempting to update a git repository, but --frozen \ - was specified" - ) - } - if !config.network_allowed() { - anyhow::bail!("can't update a git repository in the offline mode") - } - - // If we're fetching from GitHub, attempt GitHub's special fast path for - // testing if we've already got an up-to-date copy of the repository - match github_up_to_date(repo, url, reference, config) { - Ok(true) => return Ok(()), - Ok(false) => {} - Err(e) => debug!("failed to check github {:?}", e), - } - - // We reuse repositories quite a lot, so before we go through and update the - // repo check to see if it's a little too old and could benefit from a gc. - // In theory this shouldn't be too too expensive compared to the network - // request we're about to issue. - maybe_gc_repo(repo)?; - - // Translate the reference desired here into an actual list of refspecs - // which need to get fetched. Additionally record if we're fetching tags. - let mut refspecs = Vec::new(); - let mut tags = false; - // The `+` symbol on the refspec means to allow a forced (fast-forward) - // update which is needed if there is ever a force push that requires a - // fast-forward. - match reference { - // For branches and tags we can fetch simply one reference and copy it - // locally, no need to fetch other branches/tags. - GitReference::Branch(b) => { - refspecs.push(format!("+refs/heads/{0}:refs/remotes/origin/{0}", b)); - } - GitReference::Tag(t) => { - refspecs.push(format!("+refs/tags/{0}:refs/remotes/origin/tags/{0}", t)); - } - - GitReference::DefaultBranch => { - refspecs.push(String::from("+HEAD:refs/remotes/origin/HEAD")); - } - - GitReference::Rev(rev) => { - if rev.starts_with("refs/") { - refspecs.push(format!("+{0}:{0}", rev)); - } else { - // We don't know what the rev will point to. To handle this - // situation we fetch all branches and tags, and then we pray - // it's somewhere in there. - refspecs.push(String::from("+refs/heads/*:refs/remotes/origin/*")); - refspecs.push(String::from("+HEAD:refs/remotes/origin/HEAD")); - tags = true; - } - } - } - - // Unfortunately `libgit2` is notably lacking in the realm of authentication - // when compared to the `git` command line. As a result, allow an escape - // hatch for users that would prefer to use `git`-the-CLI for fetching - // repositories instead of `libgit2`-the-library. 
This should make more - // flavors of authentication possible while also still giving us all the - // speed and portability of using `libgit2`. - if let Some(true) = config.net_config()?.git_fetch_with_cli { - return fetch_with_cli(repo, url, &refspecs, tags, config); - } - - debug!("doing a fetch for {}", url); - let git_config = git2::Config::open_default()?; - with_fetch_options(&git_config, url, config, &mut |mut opts| { - if tags { - opts.download_tags(git2::AutotagOption::All); - } - // The `fetch` operation here may fail spuriously due to a corrupt - // repository. It could also fail, however, for a whole slew of other - // reasons (aka network related reasons). We want Cargo to automatically - // recover from corrupt repositories, but we don't want Cargo to stomp - // over other legitimate errors. - // - // Consequently we save off the error of the `fetch` operation and if it - // looks like a "corrupt repo" error then we blow away the repo and try - // again. If it looks like any other kind of error, or if we've already - // blown away the repository, then we want to return the error as-is. - let mut repo_reinitialized = false; - loop { - debug!("initiating fetch of {:?} from {}", refspecs, url); - let res = repo - .remote_anonymous(url)? - .fetch(&refspecs, Some(&mut opts), None); - let err = match res { - Ok(()) => break, - Err(e) => e, - }; - debug!("fetch failed: {}", err); - - if !repo_reinitialized && matches!(err.class(), ErrorClass::Reference | ErrorClass::Odb) - { - repo_reinitialized = true; - debug!( - "looks like this is a corrupt repository, reinitializing \ - and trying again" - ); - if reinitialize(repo).is_ok() { - continue; - } - } - - return Err(err.into()); - } - Ok(()) - }) -} - -fn fetch_with_cli( - repo: &mut git2::Repository, - url: &str, - refspecs: &[String], - tags: bool, - config: &Config, -) -> CargoResult<()> { - let mut cmd = ProcessBuilder::new("git"); - cmd.arg("fetch"); - if tags { - cmd.arg("--tags"); - } - cmd.arg("--force") // handle force pushes - .arg("--update-head-ok") // see discussion in #2078 - .arg(url) - .args(refspecs) - // If cargo is run by git (for example, the `exec` command in `git - // rebase`), the GIT_DIR is set by git and will point to the wrong - // location (this takes precedence over the cwd). Make sure this is - // unset so git will look at cwd for the repo. - .env_remove("GIT_DIR") - // The reset of these may not be necessary, but I'm including them - // just to be extra paranoid and avoid any issues. - .env_remove("GIT_WORK_TREE") - .env_remove("GIT_INDEX_FILE") - .env_remove("GIT_OBJECT_DIRECTORY") - .env_remove("GIT_ALTERNATE_OBJECT_DIRECTORIES") - .cwd(repo.path()); - config - .shell() - .verbose(|s| s.status("Running", &cmd.to_string()))?; - cmd.exec_with_output()?; - Ok(()) -} - -/// Cargo has a bunch of long-lived git repositories in its global cache and -/// some, like the index, are updated very frequently. Right now each update -/// creates a new "pack file" inside the git database, and over time this can -/// cause bad performance and bad current behavior in libgit2. -/// -/// One pathological use case today is where libgit2 opens hundreds of file -/// descriptors, getting us dangerously close to blowing out the OS limits of -/// how many fds we can have open. This is detailed in #4403. -/// -/// To try to combat this problem we attempt a `git gc` here. Note, though, that -/// we may not even have `git` installed on the system! 
As a result we -/// opportunistically try a `git gc` when the pack directory looks too big, and -/// failing that we just blow away the repository and start over. -fn maybe_gc_repo(repo: &mut git2::Repository) -> CargoResult<()> { - // Here we arbitrarily declare that if you have more than 100 files in your - // `pack` folder that we need to do a gc. - let entries = match repo.path().join("objects/pack").read_dir() { - Ok(e) => e.count(), - Err(_) => { - debug!("skipping gc as pack dir appears gone"); - return Ok(()); - } - }; - let max = env::var("__CARGO_PACKFILE_LIMIT") - .ok() - .and_then(|s| s.parse::().ok()) - .unwrap_or(100); - if entries < max { - debug!("skipping gc as there's only {} pack files", entries); - return Ok(()); - } - - // First up, try a literal `git gc` by shelling out to git. This is pretty - // likely to fail though as we may not have `git` installed. Note that - // libgit2 doesn't currently implement the gc operation, so there's no - // equivalent there. - match Command::new("git") - .arg("gc") - .current_dir(repo.path()) - .output() - { - Ok(out) => { - debug!( - "git-gc status: {}\n\nstdout ---\n{}\nstderr ---\n{}", - out.status, - String::from_utf8_lossy(&out.stdout), - String::from_utf8_lossy(&out.stderr) - ); - if out.status.success() { - let new = git2::Repository::open(repo.path())?; - *repo = new; - return Ok(()); - } - } - Err(e) => debug!("git-gc failed to spawn: {}", e), - } - - // Alright all else failed, let's start over. - reinitialize(repo) -} - -fn reinitialize(repo: &mut git2::Repository) -> CargoResult<()> { - // Here we want to drop the current repository object pointed to by `repo`, - // so we initialize temporary repository in a sub-folder, blow away the - // existing git folder, and then recreate the git repo. Finally we blow away - // the `tmp` folder we allocated. - let path = repo.path().to_path_buf(); - debug!("reinitializing git repo at {:?}", path); - let tmp = path.join("tmp"); - let bare = !repo.path().ends_with(".git"); - *repo = init(&tmp, false)?; - for entry in path.read_dir()? { - let entry = entry?; - if entry.file_name().to_str() == Some("tmp") { - continue; - } - let path = entry.path(); - drop(paths::remove_file(&path).or_else(|_| paths::remove_dir_all(&path))); - } - *repo = init(&path, bare)?; - paths::remove_dir_all(&tmp)?; - Ok(()) -} - -fn init(path: &Path, bare: bool) -> CargoResult { - let mut opts = git2::RepositoryInitOptions::new(); - // Skip anything related to templates, they just call all sorts of issues as - // we really don't want to use them yet they insist on being used. See #6240 - // for an example issue that comes up. - opts.external_template(false); - opts.bare(bare); - Ok(git2::Repository::init_opts(&path, &opts)?) -} - -/// Updating the index is done pretty regularly so we want it to be as fast as -/// possible. For registries hosted on GitHub (like the crates.io index) there's -/// a fast path available to use [1] to tell us that there's no updates to be -/// made. -/// -/// This function will attempt to hit that fast path and verify that the `oid` -/// is actually the current branch of the repository. If `true` is returned then -/// no update needs to be performed, but if `false` is returned then the -/// standard update logic still needs to happen. -/// -/// [1]: https://developer.github.com/v3/repos/commits/#get-the-sha-1-of-a-commit-reference -/// -/// Note that this function should never cause an actual failure because it's -/// just a fast path. 
As a result all errors are ignored in this function and we -/// just return a `bool`. Any real errors will be reported through the normal -/// update path above. -fn github_up_to_date( - repo: &mut git2::Repository, - url: &str, - reference: &GitReference, - config: &Config, -) -> CargoResult { - let url = Url::parse(url)?; - if url.host_str() != Some("github.com") { - return Ok(false); - } - - let github_branch_name = match reference { - GitReference::Branch(branch) => branch, - GitReference::Tag(tag) => tag, - GitReference::DefaultBranch => "HEAD", - GitReference::Rev(rev) => { - if rev.starts_with("refs/") { - rev - } else { - debug!("can't use github fast path with `rev = \"{}\"`", rev); - return Ok(false); - } - } - }; - - // This expects GitHub urls in the form `github.com/user/repo` and nothing - // else - let mut pieces = url - .path_segments() - .ok_or_else(|| anyhow!("no path segments on url"))?; - let username = pieces - .next() - .ok_or_else(|| anyhow!("couldn't find username"))?; - let repository = pieces - .next() - .ok_or_else(|| anyhow!("couldn't find repository name"))?; - if pieces.next().is_some() { - anyhow::bail!("too many segments on URL"); - } - - // Trim off the `.git` from the repository, if present, since that's - // optional for GitHub and won't work when we try to use the API as well. - let repository = repository.strip_suffix(".git").unwrap_or(repository); - - let url = format!( - "https://api.github.com/repos/{}/{}/commits/{}", - username, repository, github_branch_name, - ); - let mut handle = config.http()?.borrow_mut(); - debug!("attempting GitHub fast path for {}", url); - handle.get(true)?; - handle.url(&url)?; - handle.useragent("cargo")?; - let mut headers = List::new(); - headers.append("Accept: application/vnd.github.3.sha")?; - headers.append(&format!("If-None-Match: \"{}\"", reference.resolve(repo)?))?; - handle.http_headers(headers)?; - handle.perform()?; - Ok(handle.response_code()? 
== 304) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/mod.rs deleted file mode 100644 index 7d238d47d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/mod.rs +++ /dev/null @@ -1,13 +0,0 @@ -pub use self::config::SourceConfigMap; -pub use self::directory::DirectorySource; -pub use self::git::GitSource; -pub use self::path::PathSource; -pub use self::registry::{RegistrySource, CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; -pub use self::replaced::ReplacedSource; - -pub mod config; -pub mod directory; -pub mod git; -pub mod path; -pub mod registry; -pub mod replaced; diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/path.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/path.rs deleted file mode 100644 index cc1c98741..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/path.rs +++ /dev/null @@ -1,561 +0,0 @@ -use std::collections::HashSet; -use std::fmt::{self, Debug, Formatter}; -use std::path::{Path, PathBuf}; - -use crate::core::source::MaybePackage; -use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; -use crate::ops; -use crate::util::{internal, CargoResult, Config}; -use anyhow::Context as _; -use cargo_util::paths; -use filetime::FileTime; -use ignore::gitignore::GitignoreBuilder; -use log::{trace, warn}; -use walkdir::WalkDir; - -pub struct PathSource<'cfg> { - source_id: SourceId, - path: PathBuf, - updated: bool, - packages: Vec, - config: &'cfg Config, - recursive: bool, -} - -impl<'cfg> PathSource<'cfg> { - /// Invoked with an absolute path to a directory that contains a `Cargo.toml`. - /// - /// This source will only return the package at precisely the `path` - /// specified, and it will be an error if there's not a package at `path`. - pub fn new(path: &Path, source_id: SourceId, config: &'cfg Config) -> PathSource<'cfg> { - PathSource { - source_id, - path: path.to_path_buf(), - updated: false, - packages: Vec::new(), - config, - recursive: false, - } - } - - /// Creates a new source which is walked recursively to discover packages. - /// - /// This is similar to the `new` method except that instead of requiring a - /// valid package to be present at `root` the folder is walked entirely to - /// crawl for packages. - /// - /// Note that this should be used with care and likely shouldn't be chosen - /// by default! 
- pub fn new_recursive(root: &Path, id: SourceId, config: &'cfg Config) -> PathSource<'cfg> { - PathSource { - recursive: true, - ..PathSource::new(root, id, config) - } - } - - pub fn preload_with(&mut self, pkg: Package) { - assert!(!self.updated); - assert!(!self.recursive); - assert!(self.packages.is_empty()); - self.updated = true; - self.packages.push(pkg); - } - - pub fn root_package(&mut self) -> CargoResult { - trace!("root_package; source={:?}", self); - - self.update()?; - - match self.packages.iter().find(|p| p.root() == &*self.path) { - Some(pkg) => Ok(pkg.clone()), - None => Err(internal(format!( - "no package found in source {:?}", - self.path - ))), - } - } - - pub fn read_packages(&self) -> CargoResult> { - if self.updated { - Ok(self.packages.clone()) - } else if self.recursive { - ops::read_packages(&self.path, self.source_id, self.config) - } else { - let path = self.path.join("Cargo.toml"); - let (pkg, _) = ops::read_package(&path, self.source_id, self.config)?; - Ok(vec![pkg]) - } - } - - /// List all files relevant to building this package inside this source. - /// - /// This function will use the appropriate methods to determine the - /// set of files underneath this source's directory which are relevant for - /// building `pkg`. - /// - /// The basic assumption of this method is that all files in the directory - /// are relevant for building this package, but it also contains logic to - /// use other methods like .gitignore to filter the list of files. - pub fn list_files(&self, pkg: &Package) -> CargoResult> { - self._list_files(pkg).with_context(|| { - format!( - "failed to determine list of files in {}", - pkg.root().display() - ) - }) - } - - fn _list_files(&self, pkg: &Package) -> CargoResult> { - let root = pkg.root(); - let no_include_option = pkg.manifest().include().is_empty(); - let git_repo = if no_include_option { - self.discover_git_repo(root)? - } else { - None - }; - - let mut exclude_builder = GitignoreBuilder::new(root); - if no_include_option && git_repo.is_none() { - // no include option and not git repo discovered (see rust-lang/cargo#7183). - exclude_builder.add_line(None, ".*")?; - } - for rule in pkg.manifest().exclude() { - exclude_builder.add_line(None, rule)?; - } - let ignore_exclude = exclude_builder.build()?; - - let mut include_builder = GitignoreBuilder::new(root); - for rule in pkg.manifest().include() { - include_builder.add_line(None, rule)?; - } - let ignore_include = include_builder.build()?; - - let ignore_should_package = |relative_path: &Path, is_dir: bool| { - // "Include" and "exclude" options are mutually exclusive. - if no_include_option { - !ignore_exclude - .matched_path_or_any_parents(relative_path, is_dir) - .is_ignore() - } else { - if is_dir { - // Generally, include directives don't list every - // directory (nor should they!). Just skip all directory - // checks, and only check files. - return true; - } - ignore_include - .matched_path_or_any_parents(relative_path, /* is_dir */ false) - .is_ignore() - } - }; - - let mut filter = |path: &Path, is_dir: bool| { - let relative_path = match path.strip_prefix(root) { - Ok(p) => p, - Err(_) => return false, - }; - - let rel = relative_path.as_os_str(); - if rel == "Cargo.lock" { - return pkg.include_lockfile(); - } else if rel == "Cargo.toml" { - return true; - } - - ignore_should_package(relative_path, is_dir) - }; - - // Attempt Git-prepopulate only if no `include` (see rust-lang/cargo#4135). 
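// A minimal sketch of how a list of `exclude` rules can be evaluated with the
// `ignore` crate, mirroring the `exclude_builder` used below; the function
// name, the rule strings, and the paths are made up for illustration, and
// `rel_path` is assumed to be relative to `pkg_root`.
use std::path::Path;
use ignore::gitignore::GitignoreBuilder;

fn is_excluded(pkg_root: &Path, rules: &[&str], rel_path: &Path, is_dir: bool) -> bool {
    let mut builder = GitignoreBuilder::new(pkg_root);
    for rule in rules {
        // `None` means the rule does not come from an on-disk .gitignore file.
        builder.add_line(None, rule).expect("invalid exclude rule");
    }
    let matcher = builder.build().expect("failed to build matcher");
    matcher
        .matched_path_or_any_parents(rel_path, is_dir)
        .is_ignore()
}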
- if no_include_option { - if let Some(repo) = git_repo { - return self.list_files_git(pkg, &repo, &mut filter); - } - } - self.list_files_walk(pkg, &mut filter) - } - - /// Returns `Some(git2::Repository)` if found sibling `Cargo.toml` and `.git` - /// directory; otherwise, caller should fall back on full file list. - fn discover_git_repo(&self, root: &Path) -> CargoResult> { - let repo = match git2::Repository::discover(root) { - Ok(repo) => repo, - Err(e) => { - log::debug!( - "could not discover git repo at or above {}: {}", - root.display(), - e - ); - return Ok(None); - } - }; - let index = repo - .index() - .with_context(|| format!("failed to open git index at {}", repo.path().display()))?; - let repo_root = repo.workdir().ok_or_else(|| { - anyhow::format_err!( - "did not expect repo at {} to be bare", - repo.path().display() - ) - })?; - let repo_relative_path = match paths::strip_prefix_canonical(root, repo_root) { - Ok(p) => p, - Err(e) => { - log::warn!( - "cannot determine if path `{:?}` is in git repo `{:?}`: {:?}", - root, - repo_root, - e - ); - return Ok(None); - } - }; - let manifest_path = repo_relative_path.join("Cargo.toml"); - if index.get_path(&manifest_path, 0).is_some() { - return Ok(Some(repo)); - } - // Package Cargo.toml is not in git, don't use git to guide our selection. - Ok(None) - } - - fn list_files_git( - &self, - pkg: &Package, - repo: &git2::Repository, - filter: &mut dyn FnMut(&Path, bool) -> bool, - ) -> CargoResult> { - warn!("list_files_git {}", pkg.package_id()); - let index = repo.index()?; - let root = repo - .workdir() - .ok_or_else(|| anyhow::format_err!("can't list files on a bare repository"))?; - let pkg_path = pkg.root(); - - let mut ret = Vec::::new(); - - // We use information from the Git repository to guide us in traversing - // its tree. The primary purpose of this is to take advantage of the - // `.gitignore` and auto-ignore files that don't matter. - // - // Here we're also careful to look at both tracked and untracked files as - // the untracked files are often part of a build and may become relevant - // as part of a future commit. - let index_files = index.iter().map(|entry| { - use libgit2_sys::{GIT_FILEMODE_COMMIT, GIT_FILEMODE_LINK}; - // ``is_dir`` is an optimization to avoid calling - // ``fs::metadata`` on every file. - let is_dir = if entry.mode == GIT_FILEMODE_LINK as u32 { - // Let the code below figure out if this symbolic link points - // to a directory or not. - None - } else { - Some(entry.mode == GIT_FILEMODE_COMMIT as u32) - }; - (join(root, &entry.path), is_dir) - }); - let mut opts = git2::StatusOptions::new(); - opts.include_untracked(true); - if let Ok(suffix) = pkg_path.strip_prefix(root) { - opts.pathspec(suffix); - } - let statuses = repo.statuses(Some(&mut opts))?; - let mut skip_paths = HashSet::new(); - let untracked: Vec<_> = statuses - .iter() - .filter_map(|entry| { - match entry.status() { - // Don't include Cargo.lock if it is untracked. Packaging will - // generate a new one as needed. 
- git2::Status::WT_NEW if entry.path() != Some("Cargo.lock") => { - Some(Ok((join(root, entry.path_bytes()), None))) - } - git2::Status::WT_DELETED => { - let path = match join(root, entry.path_bytes()) { - Ok(p) => p, - Err(e) => return Some(Err(e)), - }; - skip_paths.insert(path); - None - } - _ => None, - } - }) - .collect::>()?; - - let mut subpackages_found = Vec::new(); - - for (file_path, is_dir) in index_files.chain(untracked) { - let file_path = file_path?; - if skip_paths.contains(&file_path) { - continue; - } - - // Filter out files blatantly outside this package. This is helped a - // bit above via the `pathspec` function call, but we need to filter - // the entries in the index as well. - if !file_path.starts_with(pkg_path) { - continue; - } - - match file_path.file_name().and_then(|s| s.to_str()) { - // The `target` directory is never included. - Some("target") => continue, - - // Keep track of all sub-packages found and also strip out all - // matches we've found so far. Note, though, that if we find - // our own `Cargo.toml`, we keep going. - Some("Cargo.toml") => { - let path = file_path.parent().unwrap(); - if path != pkg_path { - warn!("subpackage found: {}", path.display()); - ret.retain(|p| !p.starts_with(path)); - subpackages_found.push(path.to_path_buf()); - continue; - } - } - - _ => {} - } - - // If this file is part of any other sub-package we've found so far, - // skip it. - if subpackages_found.iter().any(|p| file_path.starts_with(p)) { - continue; - } - - // `is_dir` is None for symlinks. The `unwrap` checks if the - // symlink points to a directory. - let is_dir = is_dir.unwrap_or_else(|| file_path.is_dir()); - if is_dir { - warn!(" found submodule {}", file_path.display()); - let rel = file_path.strip_prefix(root)?; - let rel = rel.to_str().ok_or_else(|| { - anyhow::format_err!("invalid utf-8 filename: {}", rel.display()) - })?; - // Git submodules are currently only named through `/` path - // separators, explicitly not `\` which windows uses. Who knew? - let rel = rel.replace(r"\", "/"); - match repo.find_submodule(&rel).and_then(|s| s.open()) { - Ok(repo) => { - let files = self.list_files_git(pkg, &repo, filter)?; - ret.extend(files.into_iter()); - } - Err(..) => { - self.walk(&file_path, &mut ret, false, filter)?; - } - } - } else if filter(&file_path, is_dir) { - assert!(!is_dir); - // We found a file! 
- warn!(" found {}", file_path.display()); - ret.push(file_path); - } - } - return Ok(ret); - - #[cfg(unix)] - fn join(path: &Path, data: &[u8]) -> CargoResult { - use std::ffi::OsStr; - use std::os::unix::prelude::*; - Ok(path.join(::from_bytes(data))) - } - #[cfg(windows)] - fn join(path: &Path, data: &[u8]) -> CargoResult { - use std::str; - match str::from_utf8(data) { - Ok(s) => Ok(path.join(s)), - Err(e) => Err(anyhow::format_err!( - "cannot process path in git with a non utf8 filename: {}\n{:?}", - e, - data - )), - } - } - } - - fn list_files_walk( - &self, - pkg: &Package, - filter: &mut dyn FnMut(&Path, bool) -> bool, - ) -> CargoResult> { - let mut ret = Vec::new(); - self.walk(pkg.root(), &mut ret, true, filter)?; - Ok(ret) - } - - fn walk( - &self, - path: &Path, - ret: &mut Vec, - is_root: bool, - filter: &mut dyn FnMut(&Path, bool) -> bool, - ) -> CargoResult<()> { - let walkdir = WalkDir::new(path) - .follow_links(true) - .into_iter() - .filter_entry(|entry| { - let path = entry.path(); - let at_root = is_root && entry.depth() == 0; - let is_dir = entry.file_type().is_dir(); - - if !at_root && !filter(path, is_dir) { - return false; - } - - if !is_dir { - return true; - } - - // Don't recurse into any sub-packages that we have. - if !at_root && path.join("Cargo.toml").exists() { - return false; - } - - // Skip root Cargo artifacts. - if is_root - && entry.depth() == 1 - && path.file_name().and_then(|s| s.to_str()) == Some("target") - { - return false; - } - - true - }); - for entry in walkdir { - match entry { - Ok(entry) => { - if !entry.file_type().is_dir() { - ret.push(entry.into_path()); - } - } - Err(err) if err.loop_ancestor().is_some() => { - self.config.shell().warn(err)?; - } - Err(err) => match err.path() { - // If the error occurs with a path, simply recover from it. - // Don't worry about error skipping here, the callers would - // still hit the IO error if they do access it thereafter. - Some(path) => ret.push(path.to_path_buf()), - None => return Err(err.into()), - }, - } - } - - Ok(()) - } - - pub fn last_modified_file(&self, pkg: &Package) -> CargoResult<(FileTime, PathBuf)> { - if !self.updated { - return Err(internal(format!( - "BUG: source `{:?}` was not updated", - self.path - ))); - } - - let mut max = FileTime::zero(); - let mut max_path = PathBuf::new(); - for file in self.list_files(pkg).with_context(|| { - format!( - "failed to determine the most recently modified file in {}", - pkg.root().display() - ) - })? { - // An `fs::stat` error here is either because path is a - // broken symlink, a permissions error, or a race - // condition where this path was `rm`-ed -- either way, - // we can ignore the error and treat the path's `mtime` - // as `0`. 
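// A minimal std-only sketch of the "newest mtime wins" rule described above:
// files whose metadata cannot be read (broken symlink, permissions error, or
// a concurrent delete) are treated as having mtime 0 instead of failing.
use std::path::{Path, PathBuf};
use std::time::SystemTime;

fn last_modified(files: &[PathBuf]) -> Option<(SystemTime, &Path)> {
    let mut newest: Option<(SystemTime, &Path)> = None;
    for file in files {
        let mtime = std::fs::metadata(file)
            .and_then(|m| m.modified())
            .unwrap_or(SystemTime::UNIX_EPOCH);
        match newest {
            Some((t, _)) if t >= mtime => {}
            _ => newest = Some((mtime, file.as_path())),
        }
    }
    newest
}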
- let mtime = paths::mtime(&file).unwrap_or_else(|_| FileTime::zero()); - if mtime > max { - max = mtime; - max_path = file; - } - } - trace!("last modified file {}: {}", self.path.display(), max); - Ok((max, max_path)) - } - - pub fn path(&self) -> &Path { - &self.path - } -} - -impl<'cfg> Debug for PathSource<'cfg> { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - write!(f, "the paths source") - } -} - -impl<'cfg> Source for PathSource<'cfg> { - fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - for s in self.packages.iter().map(|p| p.summary()) { - if dep.matches(s) { - f(s.clone()) - } - } - Ok(()) - } - - fn fuzzy_query(&mut self, _dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - for s in self.packages.iter().map(|p| p.summary()) { - f(s.clone()) - } - Ok(()) - } - - fn supports_checksums(&self) -> bool { - false - } - - fn requires_precise(&self) -> bool { - false - } - - fn source_id(&self) -> SourceId { - self.source_id - } - - fn update(&mut self) -> CargoResult<()> { - if !self.updated { - let packages = self.read_packages()?; - self.packages.extend(packages.into_iter()); - self.updated = true; - } - - Ok(()) - } - - fn download(&mut self, id: PackageId) -> CargoResult { - trace!("getting packages; id={}", id); - - let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id); - pkg.cloned() - .map(MaybePackage::Ready) - .ok_or_else(|| internal(format!("failed to find {} in path source", id))) - } - - fn finish_download(&mut self, _id: PackageId, _data: Vec) -> CargoResult { - panic!("no download should have started") - } - - fn fingerprint(&self, pkg: &Package) -> CargoResult { - let (max, max_path) = self.last_modified_file(pkg)?; - // Note that we try to strip the prefix of this package to get a - // relative path to ensure that the fingerprint remains consistent - // across entire project directory renames. - let max_path = max_path.strip_prefix(&self.path).unwrap_or(&max_path); - Ok(format!("{} ({})", max, max_path.display())) - } - - fn describe(&self) -> String { - match self.source_id.url().to_file_path() { - Ok(path) => path.display().to_string(), - Err(_) => self.source_id.to_string(), - } - } - - fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {} - - fn is_yanked(&mut self, _pkg: PackageId) -> CargoResult { - Ok(false) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/index.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/index.rs deleted file mode 100644 index 3d873acd3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/index.rs +++ /dev/null @@ -1,885 +0,0 @@ -//! Management of the index of a registry source -//! -//! This module contains management of the index and various operations, such as -//! actually parsing the index, looking for crates, etc. This is intended to be -//! abstract over remote indices (downloaded via git) and local registry indices -//! (which are all just present on the filesystem). -//! -//! ## Index Performance -//! -//! One important aspect of the index is that we want to optimize the "happy -//! path" as much as possible. Whenever you type `cargo build` Cargo will -//! *always* reparse the registry and learn about dependency information. This -//! is done because Cargo needs to learn about the upstream crates.io crates -//! that you're using and ensure that the preexisting `Cargo.lock` still matches -//! the current state of the world. -//! -//! 
Consequently, Cargo "null builds" (the index that Cargo adds to each build -//! itself) need to be fast when accessing the index. The primary performance -//! optimization here is to avoid parsing JSON blobs from the registry if we -//! don't need them. Most secondary optimizations are centered around removing -//! allocations and such, but avoiding parsing JSON is the #1 optimization. -//! -//! When we get queries from the resolver we're given a `Dependency`. This -//! dependency in turn has a version requirement, and with lock files that -//! already exist these version requirements are exact version requirements -//! `=a.b.c`. This means that we in theory only need to parse one line of JSON -//! per query in the registry, the one that matches version `a.b.c`. -//! -//! The crates.io index, however, is not amenable to this form of query. Instead -//! the crates.io index simply is a file where each line is a JSON blob. To -//! learn about the versions in each JSON blob we would need to parse the JSON, -//! defeating the purpose of trying to parse as little as possible. -//! -//! > Note that as a small aside even *loading* the JSON from the registry is -//! > actually pretty slow. For crates.io and remote registries we don't -//! > actually check out the git index on disk because that takes quite some -//! > time and is quite large. Instead we use `libgit2` to read the JSON from -//! > the raw git objects. This in turn can be slow (aka show up high in -//! > profiles) because libgit2 has to do deflate decompression and such. -//! -//! To solve all these issues a strategy is employed here where Cargo basically -//! creates an index into the index. The first time a package is queried about -//! (first time being for an entire computer) Cargo will load the contents -//! (slowly via libgit2) from the registry. It will then (slowly) parse every -//! single line to learn about its versions. Afterwards, however, Cargo will -//! emit a new file (a cache) which is amenable for speedily parsing in future -//! invocations. -//! -//! This cache file is currently organized by basically having the semver -//! version extracted from each JSON blob. That way Cargo can quickly and easily -//! parse all versions contained and which JSON blob they're associated with. -//! The JSON blob then doesn't actually need to get parsed unless the version is -//! parsed. -//! -//! Altogether the initial measurements of this shows a massive improvement for -//! Cargo null build performance. It's expected that the improvements earned -//! here will continue to grow over time in the sense that the previous -//! implementation (parse all lines each time) actually continues to slow down -//! over time as new versions of a crate are published. In any case when first -//! implemented a null build of Cargo itself would parse 3700 JSON blobs from -//! the registry and load 150 blobs from git. Afterwards it parses 150 JSON -//! blobs and loads 0 files git. Removing 200ms or more from Cargo's startup -//! time is certainly nothing to sneeze at! -//! -//! Note that this is just a high-level overview, there's of course lots of -//! details like invalidating caches and whatnot which are handled below, but -//! hopefully those are more obvious inline in the code itself. 
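The "index into the index" strategy described above boils down to: scan each newline-delimited JSON entry once, remember which raw line belongs to which version, and only fully deserialize the line a query actually needs. A rough, std-only sketch of that shape (the field scanning here is deliberately naive and purely illustrative; the real code parses proper JSON and persists an on-disk cache file):

```rust
use std::collections::HashMap;

/// Map each version string to its raw, still-unparsed index line.
fn build_version_index(raw: &str) -> HashMap<String, &str> {
    let mut by_version = HashMap::new();
    for line in raw.lines().filter(|l| !l.is_empty()) {
        // Cheap extraction of the `"vers":"..."` field; real code must handle
        // arbitrary field order and escaping.
        if let Some(start) = line.find(r#""vers":""#) {
            let rest = &line[start + 8..];
            if let Some(end) = rest.find('"') {
                by_version.insert(rest[..end].to_string(), line);
            }
        }
    }
    by_version
}

fn main() {
    let raw = concat!(
        r#"{"name":"demo","vers":"1.0.0","deps":[]}"#, "\n",
        r#"{"name":"demo","vers":"1.1.0","deps":[]}"#,
    );
    let index = build_version_index(raw);
    // With a lock file pinning 1.1.0, only this one line would be parsed.
    println!("{}", index["1.1.0"]);
}
```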
- -use crate::core::dependency::Dependency; -use crate::core::{PackageId, SourceId, Summary}; -use crate::sources::registry::{RegistryData, RegistryPackage, INDEX_V_MAX}; -use crate::util::interning::InternedString; -use crate::util::{internal, CargoResult, Config, Filesystem, OptVersionReq, ToSemver}; -use anyhow::bail; -use cargo_util::{paths, registry::make_dep_path}; -use log::{debug, info}; -use semver::Version; -use std::collections::{HashMap, HashSet}; -use std::convert::TryInto; -use std::fs; -use std::path::Path; -use std::str; - -/// Crates.io treats hyphen and underscores as interchangeable, but the index and old Cargo do not. -/// Therefore, the index must store uncanonicalized version of the name so old Cargo's can find it. -/// This loop tries all possible combinations of switching hyphen and underscores to find the -/// uncanonicalized one. As all stored inputs have the correct spelling, we start with the spelling -/// as-provided. -struct UncanonicalizedIter<'s> { - input: &'s str, - num_hyphen_underscore: u32, - hyphen_combination_num: u16, -} - -impl<'s> UncanonicalizedIter<'s> { - fn new(input: &'s str) -> Self { - let num_hyphen_underscore = input.chars().filter(|&c| c == '_' || c == '-').count() as u32; - UncanonicalizedIter { - input, - num_hyphen_underscore, - hyphen_combination_num: 0, - } - } -} - -impl<'s> Iterator for UncanonicalizedIter<'s> { - type Item = String; - - fn next(&mut self) -> Option { - if self.hyphen_combination_num > 0 - && self.hyphen_combination_num.trailing_zeros() >= self.num_hyphen_underscore - { - return None; - } - - let ret = Some( - self.input - .chars() - .scan(0u16, |s, c| { - // the check against 15 here's to prevent - // shift overflow on inputs with more than 15 hyphens - if (c == '_' || c == '-') && *s <= 15 { - let switch = (self.hyphen_combination_num & (1u16 << *s)) > 0; - let out = if (c == '_') ^ switch { '_' } else { '-' }; - *s += 1; - Some(out) - } else { - Some(c) - } - }) - .collect(), - ); - self.hyphen_combination_num += 1; - ret - } -} - -#[test] -fn no_hyphen() { - assert_eq!( - UncanonicalizedIter::new("test").collect::>(), - vec!["test".to_string()] - ) -} - -#[test] -fn two_hyphen() { - assert_eq!( - UncanonicalizedIter::new("te-_st").collect::>(), - vec![ - "te-_st".to_string(), - "te__st".to_string(), - "te--st".to_string(), - "te_-st".to_string() - ] - ) -} - -#[test] -fn overflow_hyphen() { - assert_eq!( - UncanonicalizedIter::new("te-_-_-_-_-_-_-_-_-st") - .take(100) - .count(), - 100 - ) -} - -/// Manager for handling the on-disk index. -/// -/// Note that local and remote registries store the index differently. Local -/// is a simple on-disk tree of files of the raw index. Remote registries are -/// stored as a raw git repository. The different means of access are handled -/// via the [`RegistryData`] trait abstraction. -/// -/// This transparently handles caching of the index in a more efficient format. -pub struct RegistryIndex<'cfg> { - source_id: SourceId, - /// Root directory of the index for the registry. - path: Filesystem, - /// Cache of summary data. - /// - /// This is keyed off the package name. The [`Summaries`] value handles - /// loading the summary data. It keeps an optimized on-disk representation - /// of the JSON files, which is created in an as-needed fashion. If it - /// hasn't been cached already, it uses [`RegistryData::load`] to access - /// to JSON files from the index, and the creates the optimized on-disk - /// summary cache. 
- summaries_cache: HashMap, - /// [`Config`] reference for convenience. - config: &'cfg Config, -} - -/// An internal cache of summaries for a particular package. -/// -/// A list of summaries are loaded from disk via one of two methods: -/// -/// 1. Primarily Cargo will parse the corresponding file for a crate in the -/// upstream crates.io registry. That's just a JSON blob per line which we -/// can parse, extract the version, and then store here. -/// -/// 2. Alternatively, if Cargo has previously run, we'll have a cached index of -/// dependencies for the upstream index. This is a file that Cargo maintains -/// lazily on the local filesystem and is much faster to parse since it -/// doesn't involve parsing all of the JSON. -/// -/// The outward-facing interface of this doesn't matter too much where it's -/// loaded from, but it's important when reading the implementation to note that -/// we try to parse as little as possible! -#[derive(Default)] -struct Summaries { - /// A raw vector of uninterpreted bytes. This is what `Unparsed` start/end - /// fields are indexes into. If a `Summaries` is loaded from the crates.io - /// index then this field will be empty since nothing is `Unparsed`. - raw_data: Vec, - - /// All known versions of a crate, keyed from their `Version` to the - /// possibly parsed or unparsed version of the full summary. - versions: HashMap, -} - -/// A lazily parsed `IndexSummary`. -enum MaybeIndexSummary { - /// A summary which has not been parsed, The `start` and `end` are pointers - /// into `Summaries::raw_data` which this is an entry of. - Unparsed { start: usize, end: usize }, - - /// An actually parsed summary. - Parsed(IndexSummary), -} - -/// A parsed representation of a summary from the index. -/// -/// In addition to a full `Summary` we have information on whether it is `yanked`. -pub struct IndexSummary { - pub summary: Summary, - pub yanked: bool, - /// Schema version, see [`RegistryPackage`]. - v: u32, -} - -/// A representation of the cache on disk that Cargo maintains of summaries. -/// Cargo will initially parse all summaries in the registry and will then -/// serialize that into this form and place it in a new location on disk, -/// ensuring that access in the future is much speedier. -#[derive(Default)] -struct SummariesCache<'a> { - versions: Vec<(Version, &'a [u8])>, -} - -impl<'cfg> RegistryIndex<'cfg> { - pub fn new( - source_id: SourceId, - path: &Filesystem, - config: &'cfg Config, - ) -> RegistryIndex<'cfg> { - RegistryIndex { - source_id, - path: path.clone(), - summaries_cache: HashMap::new(), - config, - } - } - - /// Returns the hash listed for a specified `PackageId`. - pub fn hash(&mut self, pkg: PackageId, load: &mut dyn RegistryData) -> CargoResult<&str> { - let req = OptVersionReq::exact(pkg.version()); - let summary = self - .summaries(pkg.name(), &req, load)? - .next() - .ok_or_else(|| internal(format!("no hash listed for {}", pkg)))?; - summary - .summary - .checksum() - .ok_or_else(|| internal(format!("no hash listed for {}", pkg))) - } - - /// Load a list of summaries for `name` package in this registry which - /// match `req` - /// - /// This function will semantically parse the on-disk index, match all - /// versions, and then return an iterator over all summaries which matched. - /// Internally there's quite a few layer of caching to amortize this cost - /// though since this method is called quite a lot on null builds in Cargo. 
- pub fn summaries<'a, 'b>( - &'a mut self, - name: InternedString, - req: &'b OptVersionReq, - load: &mut dyn RegistryData, - ) -> CargoResult + 'b> - where - 'a: 'b, - { - let source_id = self.source_id; - let config = self.config; - let namespaced_features = self.config.cli_unstable().namespaced_features; - let weak_dep_features = self.config.cli_unstable().weak_dep_features; - - // First up actually parse what summaries we have available. If Cargo - // has run previously this will parse a Cargo-specific cache file rather - // than the registry itself. In effect this is intended to be a quite - // cheap operation. - let summaries = self.load_summaries(name, load)?; - - // Iterate over our summaries, extract all relevant ones which match our - // version requirement, and then parse all corresponding rows in the - // registry. As a reminder this `summaries` method is called for each - // entry in a lock file on every build, so we want to absolutely - // minimize the amount of work being done here and parse as little as - // necessary. - let raw_data = &summaries.raw_data; - let max_version = if namespaced_features || weak_dep_features { - INDEX_V_MAX - } else { - 1 - }; - Ok(summaries - .versions - .iter_mut() - .filter_map(move |(k, v)| if req.matches(k) { Some(v) } else { None }) - .filter_map( - move |maybe| match maybe.parse(config, raw_data, source_id) { - Ok(summary) => Some(summary), - Err(e) => { - info!("failed to parse `{}` registry package: {}", name, e); - None - } - }, - ) - .filter(move |is| { - if is.v > max_version { - debug!( - "unsupported schema version {} ({} {})", - is.v, - is.summary.name(), - is.summary.version() - ); - false - } else { - true - } - }) - .filter(move |is| { - is.summary - .unstable_gate(namespaced_features, weak_dep_features) - .is_ok() - })) - } - - fn load_summaries( - &mut self, - name: InternedString, - load: &mut dyn RegistryData, - ) -> CargoResult<&mut Summaries> { - // If we've previously loaded what versions are present for `name`, just - // return that since our cache should still be valid. - if self.summaries_cache.contains_key(&name) { - return Ok(self.summaries_cache.get_mut(&name).unwrap()); - } - - // Prepare the `RegistryData` which will lazily initialize internal data - // structures. - load.prepare()?; - - // let root = self.config.assert_package_cache_locked(&self.path); - let root = load.assert_index_locked(&self.path); - let cache_root = root.join(".cache"); - let index_version = load.current_version(); - - // See module comment in `registry/mod.rs` for why this is structured - // the way it is. - let fs_name = name - .chars() - .flat_map(|c| c.to_lowercase()) - .collect::(); - let raw_path = make_dep_path(&fs_name, false); - - // Attempt to handle misspellings by searching for a chain of related - // names to the original `raw_path` name. Only return summaries - // associated with the first hit, however. The resolver will later - // reject any candidates that have the wrong name, and with this it'll - // along the way produce helpful "did you mean?" suggestions. - for path in UncanonicalizedIter::new(&raw_path).take(1024) { - let summaries = Summaries::parse( - index_version.as_deref(), - root, - &cache_root, - path.as_ref(), - self.source_id, - load, - self.config, - )?; - if let Some(summaries) = summaries { - self.summaries_cache.insert(name, summaries); - return Ok(self.summaries_cache.get_mut(&name).unwrap()); - } - } - - // If nothing was found then this crate doesn't exists, so just use an - // empty `Summaries` list. 
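The negative-caching step described in the comment above — a crate that is not found is remembered as an empty entry, so repeated queries for a missing or misspelled name stay cheap — is a small pattern on its own. A minimal sketch, with a generic loader closure standing in for the registry lookup (names are illustrative):

```rust
use std::collections::HashMap;

/// Remember misses as empty entries so the expensive lookup runs at most
/// once per name, whether or not the name exists.
fn load_or_default<'a>(
    cache: &'a mut HashMap<String, Vec<String>>,
    name: &str,
    load: impl FnOnce(&str) -> Option<Vec<String>>,
) -> &'a Vec<String> {
    if !cache.contains_key(name) {
        let loaded = load(name).unwrap_or_default();
        cache.insert(name.to_string(), loaded);
    }
    &cache[name]
}
```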
- self.summaries_cache.insert(name, Summaries::default()); - Ok(self.summaries_cache.get_mut(&name).unwrap()) - } - - pub fn query_inner( - &mut self, - dep: &Dependency, - load: &mut dyn RegistryData, - yanked_whitelist: &HashSet, - f: &mut dyn FnMut(Summary), - ) -> CargoResult<()> { - if self.config.offline() - && self.query_inner_with_online(dep, load, yanked_whitelist, f, false)? != 0 - { - return Ok(()); - // If offline, and there are no matches, try again with online. - // This is necessary for dependencies that are not used (such as - // target-cfg or optional), but are not downloaded. Normally the - // build should succeed if they are not downloaded and not used, - // but they still need to resolve. If they are actually needed - // then cargo will fail to download and an error message - // indicating that the required dependency is unavailable while - // offline will be displayed. - } - self.query_inner_with_online(dep, load, yanked_whitelist, f, true)?; - Ok(()) - } - - fn query_inner_with_online( - &mut self, - dep: &Dependency, - load: &mut dyn RegistryData, - yanked_whitelist: &HashSet, - f: &mut dyn FnMut(Summary), - online: bool, - ) -> CargoResult { - let source_id = self.source_id; - let summaries = self - .summaries(dep.package_name(), dep.version_req(), load)? - // First filter summaries for `--offline`. If we're online then - // everything is a candidate, otherwise if we're offline we're only - // going to consider candidates which are actually present on disk. - // - // Note: This particular logic can cause problems with - // optional dependencies when offline. If at least 1 version - // of an optional dependency is downloaded, but that version - // does not satisfy the requirements, then resolution will - // fail. Unfortunately, whether or not something is optional - // is not known here. - .filter(|s| (online || load.is_crate_downloaded(s.summary.package_id()))) - // Next filter out all yanked packages. Some yanked packages may - // leak throguh if they're in a whitelist (aka if they were - // previously in `Cargo.lock` - .filter(|s| !s.yanked || yanked_whitelist.contains(&s.summary.package_id())) - .map(|s| s.summary.clone()); - - // Handle `cargo update --precise` here. If specified, our own source - // will have a precise version listed of the form - // `=o->` where `` is the name of a crate on - // this source, `` is the version installed and ` is the - // version requested (argument to `--precise`). - let name = dep.package_name().as_str(); - let precise = match source_id.precise() { - Some(p) if p.starts_with(name) && p[name.len()..].starts_with('=') => { - let mut vers = p[name.len() + 1..].splitn(2, "->"); - let current_vers = vers.next().unwrap().to_semver().unwrap(); - let requested_vers = vers.next().unwrap().to_semver().unwrap(); - Some((current_vers, requested_vers)) - } - _ => None, - }; - let summaries = summaries.filter(|s| match &precise { - Some((current, requested)) => { - if dep.version_req().matches(current) { - // Unfortunately crates.io allows versions to differ only - // by build metadata. This shouldn't be allowed, but since - // it is, this will honor it if requested. However, if not - // specified, then ignore it. - let s_vers = s.version(); - match (s_vers.build.is_empty(), requested.build.is_empty()) { - (true, true) => s_vers == requested, - (true, false) => false, - (false, true) => { - // Strip out the metadata. 
- s_vers.major == requested.major - && s_vers.minor == requested.minor - && s_vers.patch == requested.patch - && s_vers.pre == requested.pre - } - (false, false) => s_vers == requested, - } - } else { - true - } - } - None => true, - }); - - let mut count = 0; - for summary in summaries { - f(summary); - count += 1; - } - Ok(count) - } - - pub fn is_yanked(&mut self, pkg: PackageId, load: &mut dyn RegistryData) -> CargoResult { - let req = OptVersionReq::exact(pkg.version()); - let found = self - .summaries(pkg.name(), &req, load)? - .any(|summary| summary.yanked); - Ok(found) - } -} - -impl Summaries { - /// Parse out a `Summaries` instances from on-disk state. - /// - /// This will attempt to prefer parsing a previous cache file that already - /// exists from a previous invocation of Cargo (aka you're typing `cargo - /// build` again after typing it previously). If parsing fails or the cache - /// isn't found, then we take a slower path which loads the full descriptor - /// for `relative` from the underlying index (aka typically libgit2 with - /// crates.io) and then parse everything in there. - /// - /// * `index_version` - a version string to describe the current state of - /// the index which for remote registries is the current git sha and - /// for local registries is not available. - /// * `root` - this is the root argument passed to `load` - /// * `cache_root` - this is the root on the filesystem itself of where to - /// store cache files. - /// * `relative` - this is the file we're loading from cache or the index - /// data - /// * `source_id` - the registry's SourceId used when parsing JSON blobs to - /// create summaries. - /// * `load` - the actual index implementation which may be very slow to - /// call. We avoid this if we can. - pub fn parse( - index_version: Option<&str>, - root: &Path, - cache_root: &Path, - relative: &Path, - source_id: SourceId, - load: &mut dyn RegistryData, - config: &Config, - ) -> CargoResult> { - // First up, attempt to load the cache. This could fail for all manner - // of reasons, but consider all of them non-fatal and just log their - // occurrence in case anyone is debugging anything. - let cache_path = cache_root.join(relative); - let mut cache_contents = None; - if let Some(index_version) = index_version { - match fs::read(&cache_path) { - Ok(contents) => match Summaries::parse_cache(contents, index_version) { - Ok(s) => { - log::debug!("fast path for registry cache of {:?}", relative); - if cfg!(debug_assertions) { - cache_contents = Some(s.raw_data); - } else { - return Ok(Some(s)); - } - } - Err(e) => { - log::debug!("failed to parse {:?} cache: {}", relative, e); - } - }, - Err(e) => log::debug!("cache missing for {:?} error: {}", relative, e), - } - } - - // This is the fallback path where we actually talk to libgit2 to load - // information. Here we parse every single line in the index (as we need - // to find the versions) - log::debug!("slow path for {:?}", relative); - let mut ret = Summaries::default(); - let mut hit_closure = false; - let mut cache_bytes = None; - let err = load.load(root, relative, &mut |contents| { - ret.raw_data = contents.to_vec(); - let mut cache = SummariesCache::default(); - hit_closure = true; - for line in split(contents, b'\n') { - // Attempt forwards-compatibility on the index by ignoring - // everything that we ourselves don't understand, that should - // allow future cargo implementations to break the - // interpretation of each line here and older cargo will simply - // ignore the new lines. 
- let summary = match IndexSummary::parse(config, line, source_id) { - Ok(summary) => summary, - Err(e) => { - // This should only happen when there is an index - // entry from a future version of cargo that this - // version doesn't understand. Hopefully, those future - // versions of cargo correctly set INDEX_V_MAX and - // CURRENT_CACHE_VERSION, otherwise this will skip - // entries in the cache preventing those newer - // versions from reading them (that is, until the - // cache is rebuilt). - log::info!("failed to parse {:?} registry package: {}", relative, e); - continue; - } - }; - let version = summary.summary.package_id().version().clone(); - cache.versions.push((version.clone(), line)); - ret.versions.insert(version, summary.into()); - } - if let Some(index_version) = index_version { - cache_bytes = Some(cache.serialize(index_version)); - } - Ok(()) - }); - - // We ignore lookup failures as those are just crates which don't exist - // or we haven't updated the registry yet. If we actually ran the - // closure though then we care about those errors. - if !hit_closure { - debug_assert!(cache_contents.is_none()); - return Ok(None); - } - err?; - - // If we've got debug assertions enabled and the cache was previously - // present and considered fresh this is where the debug assertions - // actually happens to verify that our cache is indeed fresh and - // computes exactly the same value as before. - if cfg!(debug_assertions) && cache_contents.is_some() && cache_bytes != cache_contents { - panic!( - "original cache contents:\n{:?}\n\ - does not equal new cache contents:\n{:?}\n", - cache_contents.as_ref().map(|s| String::from_utf8_lossy(s)), - cache_bytes.as_ref().map(|s| String::from_utf8_lossy(s)), - ); - } - - // Once we have our `cache_bytes` which represents the `Summaries` we're - // about to return, write that back out to disk so future Cargo - // invocations can use it. - // - // This is opportunistic so we ignore failure here but are sure to log - // something in case of error. - if let Some(cache_bytes) = cache_bytes { - if paths::create_dir_all(cache_path.parent().unwrap()).is_ok() { - let path = Filesystem::new(cache_path.clone()); - config.assert_package_cache_locked(&path); - if let Err(e) = fs::write(cache_path, cache_bytes) { - log::info!("failed to write cache: {}", e); - } - } - } - - Ok(Some(ret)) - } - - /// Parses an open `File` which represents information previously cached by - /// Cargo. - pub fn parse_cache(contents: Vec, last_index_update: &str) -> CargoResult { - let cache = SummariesCache::parse(&contents, last_index_update)?; - let mut ret = Summaries::default(); - for (version, summary) in cache.versions { - let (start, end) = subslice_bounds(&contents, summary); - ret.versions - .insert(version, MaybeIndexSummary::Unparsed { start, end }); - } - ret.raw_data = contents; - return Ok(ret); - - // Returns the start/end offsets of `inner` with `outer`. Asserts that - // `inner` is a subslice of `outer`. - fn subslice_bounds(outer: &[u8], inner: &[u8]) -> (usize, usize) { - let outer_start = outer.as_ptr() as usize; - let outer_end = outer_start + outer.len(); - let inner_start = inner.as_ptr() as usize; - let inner_end = inner_start + inner.len(); - assert!(inner_start >= outer_start); - assert!(inner_end <= outer_end); - (inner_start - outer_start, inner_end - outer_start) - } - } -} - -// Implementation of serializing/deserializing the cache of summaries on disk. 
-// Currently the format looks like: -// -// +--------------------+----------------------+-------------+---+ -// | cache version byte | index format version | git sha rev | 0 | -// +--------------------+----------------------+-------------+---+ -// -// followed by... -// -// +----------------+---+------------+---+ -// | semver version | 0 | JSON blob | 0 | ... -// +----------------+---+------------+---+ -// -// The idea is that this is a very easy file for Cargo to parse in future -// invocations. The read from disk should be quite fast and then afterwards all -// we need to know is what versions correspond to which JSON blob. -// -// The leading version byte is intended to ensure that there's some level of -// future compatibility against changes to this cache format so if different -// versions of Cargo share the same cache they don't get too confused. The git -// sha lets us know when the file needs to be regenerated (it needs regeneration -// whenever the index itself updates). -// -// Cache versions: -// * `1`: The original version. -// * `2`: Added the "index format version" field so that if the index format -// changes, different versions of cargo won't get confused reading each -// other's caches. -// * `3`: Bumped the version to work around an issue where multiple versions of -// a package were published that differ only by semver metadata. For -// example, openssl-src 110.0.0 and 110.0.0+1.1.0f. Previously, the cache -// would be incorrectly populated with two entries, both 110.0.0. After -// this, the metadata will be correctly included. This isn't really a format -// change, just a version bump to clear the incorrect cache entries. Note: -// the index shouldn't allow these, but unfortunately crates.io doesn't -// check it. - -const CURRENT_CACHE_VERSION: u8 = 3; - -impl<'a> SummariesCache<'a> { - fn parse(data: &'a [u8], last_index_update: &str) -> CargoResult> { - // NB: keep this method in sync with `serialize` below - let (first_byte, rest) = data - .split_first() - .ok_or_else(|| anyhow::format_err!("malformed cache"))?; - if *first_byte != CURRENT_CACHE_VERSION { - bail!("looks like a different Cargo's cache, bailing out"); - } - let index_v_bytes = rest - .get(..4) - .ok_or_else(|| anyhow::anyhow!("cache expected 4 bytes for index version"))?; - let index_v = u32::from_le_bytes(index_v_bytes.try_into().unwrap()); - if index_v != INDEX_V_MAX { - bail!( - "index format version {} doesn't match the version I know ({})", - index_v, - INDEX_V_MAX - ); - } - let rest = &rest[4..]; - - let mut iter = split(rest, 0); - if let Some(update) = iter.next() { - if update != last_index_update.as_bytes() { - bail!( - "cache out of date: current index ({}) != cache ({})", - last_index_update, - str::from_utf8(update)?, - ) - } - } else { - bail!("malformed file"); - } - let mut ret = SummariesCache::default(); - while let Some(version) = iter.next() { - let version = str::from_utf8(version)?; - let version = Version::parse(version)?; - let summary = iter.next().unwrap(); - ret.versions.push((version, summary)); - } - Ok(ret) - } - - fn serialize(&self, index_version: &str) -> Vec { - // NB: keep this method in sync with `parse` above - let size = self - .versions - .iter() - .map(|(_version, data)| (10 + data.len())) - .sum(); - let mut contents = Vec::with_capacity(size); - contents.push(CURRENT_CACHE_VERSION); - contents.extend(&u32::to_le_bytes(INDEX_V_MAX)); - contents.extend_from_slice(index_version.as_bytes()); - contents.push(0); - for (version, data) in self.versions.iter() { - 
contents.extend_from_slice(version.to_string().as_bytes()); - contents.push(0); - contents.extend_from_slice(data); - contents.push(0); - } - contents - } -} - -impl MaybeIndexSummary { - /// Parses this "maybe a summary" into a `Parsed` for sure variant. - /// - /// Does nothing if this is already `Parsed`, and otherwise the `raw_data` - /// passed in is sliced with the bounds in `Unparsed` and then actually - /// parsed. - fn parse( - &mut self, - config: &Config, - raw_data: &[u8], - source_id: SourceId, - ) -> CargoResult<&IndexSummary> { - let (start, end) = match self { - MaybeIndexSummary::Unparsed { start, end } => (*start, *end), - MaybeIndexSummary::Parsed(summary) => return Ok(summary), - }; - let summary = IndexSummary::parse(config, &raw_data[start..end], source_id)?; - *self = MaybeIndexSummary::Parsed(summary); - match self { - MaybeIndexSummary::Unparsed { .. } => unreachable!(), - MaybeIndexSummary::Parsed(summary) => Ok(summary), - } - } -} - -impl From for MaybeIndexSummary { - fn from(summary: IndexSummary) -> MaybeIndexSummary { - MaybeIndexSummary::Parsed(summary) - } -} - -impl IndexSummary { - /// Parses a line from the registry's index file into an `IndexSummary` for - /// a package. - /// - /// The `line` provided is expected to be valid JSON. - fn parse(config: &Config, line: &[u8], source_id: SourceId) -> CargoResult { - // ****CAUTION**** Please be extremely careful with returning errors - // from this function. Entries that error are not included in the - // index cache, and can cause cargo to get confused when switching - // between different versions that understand the index differently. - // Make sure to consider the INDEX_V_MAX and CURRENT_CACHE_VERSION - // values carefully when making changes here. - let RegistryPackage { - name, - vers, - cksum, - deps, - mut features, - features2, - yanked, - links, - v, - } = serde_json::from_slice(line)?; - let v = v.unwrap_or(1); - log::trace!("json parsed registry {}/{}", name, vers); - let pkgid = PackageId::new(name, &vers, source_id)?; - let deps = deps - .into_iter() - .map(|dep| dep.into_dep(source_id)) - .collect::>>()?; - if let Some(features2) = features2 { - for (name, values) in features2 { - features.entry(name).or_default().extend(values); - } - } - let mut summary = Summary::new(config, pkgid, deps, &features, links)?; - summary.set_checksum(cksum); - Ok(IndexSummary { - summary, - yanked: yanked.unwrap_or(false), - v, - }) - } -} - -fn split(haystack: &[u8], needle: u8) -> impl Iterator { - struct Split<'a> { - haystack: &'a [u8], - needle: u8, - } - - impl<'a> Iterator for Split<'a> { - type Item = &'a [u8]; - - fn next(&mut self) -> Option<&'a [u8]> { - if self.haystack.is_empty() { - return None; - } - let (ret, remaining) = match memchr::memchr(self.needle, self.haystack) { - Some(pos) => (&self.haystack[..pos], &self.haystack[pos + 1..]), - None => (self.haystack, &[][..]), - }; - self.haystack = remaining; - Some(ret) - } - } - - Split { haystack, needle } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/local.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/local.rs deleted file mode 100644 index cccc553ee..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/local.rs +++ /dev/null @@ -1,123 +0,0 @@ -use crate::core::PackageId; -use crate::sources::registry::{MaybeLock, RegistryConfig, RegistryData}; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use 
crate::util::{Config, Filesystem}; -use cargo_util::{paths, Sha256}; -use std::fs::File; -use std::io::prelude::*; -use std::io::SeekFrom; -use std::path::Path; - -/// A local registry is a registry that lives on the filesystem as a set of -/// `.crate` files with an `index` directory in the same format as a remote -/// registry. -pub struct LocalRegistry<'cfg> { - index_path: Filesystem, - root: Filesystem, - src_path: Filesystem, - config: &'cfg Config, -} - -impl<'cfg> LocalRegistry<'cfg> { - pub fn new(root: &Path, config: &'cfg Config, name: &str) -> LocalRegistry<'cfg> { - LocalRegistry { - src_path: config.registry_source_path().join(name), - index_path: Filesystem::new(root.join("index")), - root: Filesystem::new(root.to_path_buf()), - config, - } - } -} - -impl<'cfg> RegistryData for LocalRegistry<'cfg> { - fn prepare(&self) -> CargoResult<()> { - Ok(()) - } - - fn index_path(&self) -> &Filesystem { - &self.index_path - } - - fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path { - // Note that the `*_unlocked` variant is used here since we're not - // modifying the index and it's required to be externally synchronized. - path.as_path_unlocked() - } - - fn current_version(&self) -> Option { - None - } - - fn load( - &self, - root: &Path, - path: &Path, - data: &mut dyn FnMut(&[u8]) -> CargoResult<()>, - ) -> CargoResult<()> { - data(&paths::read_bytes(&root.join(path))?) - } - - fn config(&mut self) -> CargoResult> { - // Local registries don't have configuration for remote APIs or anything - // like that - Ok(None) - } - - fn update_index(&mut self) -> CargoResult<()> { - // Nothing to update, we just use what's on disk. Verify it actually - // exists though. We don't use any locks as we're just checking whether - // these directories exist. - let root = self.root.clone().into_path_unlocked(); - if !root.is_dir() { - anyhow::bail!("local registry path is not a directory: {}", root.display()) - } - let index_path = self.index_path.clone().into_path_unlocked(); - if !index_path.is_dir() { - anyhow::bail!( - "local registry index path is not a directory: {}", - index_path.display() - ) - } - Ok(()) - } - - fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult { - let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version()); - - // Note that the usage of `into_path_unlocked` here is because the local - // crate files here never change in that we're not the one writing them, - // so it's not our responsibility to synchronize access to them. - let path = self.root.join(&crate_file).into_path_unlocked(); - let mut crate_file = paths::open(&path)?; - - // If we've already got an unpacked version of this crate, then skip the - // checksum below as it is in theory already verified. - let dst = format!("{}-{}", pkg.name(), pkg.version()); - if self.src_path.join(dst).into_path_unlocked().exists() { - return Ok(MaybeLock::Ready(crate_file)); - } - - self.config.shell().status("Unpacking", pkg)?; - - // We don't actually need to download anything per-se, we just need to - // verify the checksum matches the .crate file itself. 
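The check that follows the comment above is, in essence, "hash the cached `.crate` file and compare it against the checksum the index recorded". Sketched here with the `sha2` crate purely for illustration (the removed code uses Cargo's own `cargo_util::Sha256` wrapper; the function name and signature below are invented):

```rust
use sha2::{Digest, Sha256};
use std::{fs, io, path::Path};

/// Hash a cached tarball and compare it to the hex checksum from the index.
fn crate_file_matches(path: &Path, expected_hex: &str) -> io::Result<bool> {
    let bytes = fs::read(path)?;
    let digest = Sha256::digest(&bytes);
    let actual: String = digest.iter().map(|b| format!("{:02x}", b)).collect();
    Ok(actual.eq_ignore_ascii_case(expected_hex))
}
```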
- let actual = Sha256::new().update_file(&crate_file)?.finish_hex(); - if actual != checksum { - anyhow::bail!("failed to verify the checksum of `{}`", pkg) - } - - crate_file.seek(SeekFrom::Start(0))?; - - Ok(MaybeLock::Ready(crate_file)) - } - - fn finish_download( - &mut self, - _pkg: PackageId, - _checksum: &str, - _data: &[u8], - ) -> CargoResult { - panic!("this source doesn't download") - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/mod.rs deleted file mode 100644 index d9df11bbf..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/mod.rs +++ /dev/null @@ -1,793 +0,0 @@ -//! A `Source` for registry-based packages. -//! -//! # What's a Registry? -//! -//! Registries are central locations where packages can be uploaded to, -//! discovered, and searched for. The purpose of a registry is to have a -//! location that serves as permanent storage for versions of a crate over time. -//! -//! Compared to git sources, a registry provides many packages as well as many -//! versions simultaneously. Git sources can also have commits deleted through -//! rebasings where registries cannot have their versions deleted. -//! -//! # The Index of a Registry -//! -//! One of the major difficulties with a registry is that hosting so many -//! packages may quickly run into performance problems when dealing with -//! dependency graphs. It's infeasible for cargo to download the entire contents -//! of the registry just to resolve one package's dependencies, for example. As -//! a result, cargo needs some efficient method of querying what packages are -//! available on a registry, what versions are available, and what the -//! dependencies for each version is. -//! -//! One method of doing so would be having the registry expose an HTTP endpoint -//! which can be queried with a list of packages and a response of their -//! dependencies and versions is returned. This is somewhat inefficient however -//! as we may have to hit the endpoint many times and we may have already -//! queried for much of the data locally already (for other packages, for -//! example). This also involves inventing a transport format between the -//! registry and Cargo itself, so this route was not taken. -//! -//! Instead, Cargo communicates with registries through a git repository -//! referred to as the Index. The Index of a registry is essentially an easily -//! query-able version of the registry's database for a list of versions of a -//! package as well as a list of dependencies for each version. -//! -//! Using git to host this index provides a number of benefits: -//! -//! * The entire index can be stored efficiently locally on disk. This means -//! that all queries of a registry can happen locally and don't need to touch -//! the network. -//! -//! * Updates of the index are quite efficient. Using git buys incremental -//! updates, compressed transmission, etc for free. The index must be updated -//! each time we need fresh information from a registry, but this is one -//! update of a git repository that probably hasn't changed a whole lot so -//! it shouldn't be too expensive. -//! -//! Additionally, each modification to the index is just appending a line at -//! the end of a file (the exact format is described later). This means that -//! the commits for an index are quite small and easily applied/compressible. -//! -//! ## The format of the Index -//! -//! 
The index is a store for the list of versions for all packages known, so its -//! format on disk is optimized slightly to ensure that `ls registry` doesn't -//! produce a list of all packages ever known. The index also wants to ensure -//! that there's not a million files which may actually end up hitting -//! filesystem limits at some point. To this end, a few decisions were made -//! about the format of the registry: -//! -//! 1. Each crate will have one file corresponding to it. Each version for a -//! crate will just be a line in this file. -//! 2. There will be two tiers of directories for crate names, under which -//! crates corresponding to those tiers will be located. -//! -//! As an example, this is an example hierarchy of an index: -//! -//! ```notrust -//! . -//! โ”œโ”€โ”€ 3 -//! โ”‚ย ย  โ””โ”€โ”€ u -//! โ”‚ย ย  โ””โ”€โ”€ url -//! โ”œโ”€โ”€ bz -//! โ”‚ย ย  โ””โ”€โ”€ ip -//! โ”‚ย ย  โ””โ”€โ”€ bzip2 -//! โ”œโ”€โ”€ config.json -//! โ”œโ”€โ”€ en -//! โ”‚ย ย  โ””โ”€โ”€ co -//! โ”‚ย ย  โ””โ”€โ”€ encoding -//! โ””โ”€โ”€ li -//! ย ย  โ”œโ”€โ”€ bg -//! ย ย  โ”‚ย ย  โ””โ”€โ”€ libgit2 -//! ย ย  โ””โ”€โ”€ nk -//! ย ย  โ””โ”€โ”€ link-config -//! ``` -//! -//! The root of the index contains a `config.json` file with a few entries -//! corresponding to the registry (see [`RegistryConfig`] below). -//! -//! Otherwise, there are three numbered directories (1, 2, 3) for crates with -//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the -//! crate files underneath them, while the 3 directory is sharded by the first -//! letter of the crate name. -//! -//! Otherwise the top-level directory contains many two-letter directory names, -//! each of which has many sub-folders with two letters. At the end of all these -//! are the actual crate files themselves. -//! -//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as -//! efficient lookup based on the crate name itself. -//! -//! ## Crate files -//! -//! Each file in the index is the history of one crate over time. Each line in -//! the file corresponds to one version of a crate, stored in JSON format (see -//! the `RegistryPackage` structure below). -//! -//! As new versions are published, new lines are appended to this file. The only -//! modifications to this file that should happen over time are yanks of a -//! particular version. -//! -//! # Downloading Packages -//! -//! The purpose of the Index was to provide an efficient method to resolve the -//! dependency graph for a package. So far we only required one network -//! interaction to update the registry's repository (yay!). After resolution has -//! been performed, however we need to download the contents of packages so we -//! can read the full manifest and build the source code. -//! -//! To accomplish this, this source's `download` method will make an HTTP -//! request per-package requested to download tarballs into a local cache. These -//! tarballs will then be unpacked into a destination folder. -//! -//! Note that because versions uploaded to the registry are frozen forever that -//! the HTTP download and unpacking can all be skipped if the version has -//! already been downloaded and unpacked. This caching allows us to only -//! download a package when absolutely necessary. -//! -//! # Filesystem Hierarchy -//! -//! Overall, the `$HOME/.cargo` looks like this when talking about the registry: -//! -//! ```notrust -//! # A folder under which all registry metadata is hosted (similar to -//! # $HOME/.cargo/git) -//! 
$HOME/.cargo/registry/ -//! -//! # For each registry that cargo knows about (keyed by hostname + hash) -//! # there is a folder which is the checked out version of the index for -//! # the registry in this location. Note that this is done so cargo can -//! # support multiple registries simultaneously -//! index/ -//! registry1-/ -//! registry2-/ -//! ... -//! -//! # This folder is a cache for all downloaded tarballs from a registry. -//! # Once downloaded and verified, a tarball never changes. -//! cache/ -//! registry1-/-.crate -//! ... -//! -//! # Location in which all tarballs are unpacked. Each tarball is known to -//! # be frozen after downloading, so transitively this folder is also -//! # frozen once its unpacked (it's never unpacked again) -//! src/ -//! registry1-/-/... -//! ... -//! ``` - -use std::borrow::Cow; -use std::collections::BTreeMap; -use std::collections::HashSet; -use std::fs::{File, OpenOptions}; -use std::io::Write; -use std::path::{Path, PathBuf}; - -use anyhow::Context as _; -use flate2::read::GzDecoder; -use log::debug; -use semver::Version; -use serde::Deserialize; -use tar::Archive; - -use crate::core::dependency::{DepKind, Dependency}; -use crate::core::source::MaybePackage; -use crate::core::{Package, PackageId, Source, SourceId, Summary}; -use crate::sources::PathSource; -use crate::util::hex; -use crate::util::interning::InternedString; -use crate::util::into_url::IntoUrl; -use crate::util::{restricted_names, CargoResult, Config, Filesystem, OptVersionReq}; - -const PACKAGE_SOURCE_LOCK: &str = ".cargo-ok"; -pub const CRATES_IO_INDEX: &str = "https://github.com/rust-lang/crates.io-index"; -pub const CRATES_IO_REGISTRY: &str = "crates-io"; -pub const CRATES_IO_DOMAIN: &str = "crates.io"; -const CRATE_TEMPLATE: &str = "{crate}"; -const VERSION_TEMPLATE: &str = "{version}"; -const PREFIX_TEMPLATE: &str = "{prefix}"; -const LOWER_PREFIX_TEMPLATE: &str = "{lowerprefix}"; -const CHECKSUM_TEMPLATE: &str = "{sha256-checksum}"; - -/// A "source" for a local (see `local::LocalRegistry`) or remote (see -/// `remote::RemoteRegistry`) registry. -/// -/// This contains common functionality that is shared between the two registry -/// kinds, with the registry-specific logic implemented as part of the -/// [`RegistryData`] trait referenced via the `ops` field. -pub struct RegistrySource<'cfg> { - source_id: SourceId, - /// The path where crate files are extracted (`$CARGO_HOME/registry/src/$REG-HASH`). - src_path: Filesystem, - /// Local reference to [`Config`] for convenience. - config: &'cfg Config, - /// Whether or not the index has been updated. - /// - /// This is used as an optimization to avoid updating if not needed, such - /// as `Cargo.lock` already exists and the index already contains the - /// locked entries. Or, to avoid updating multiple times. - /// - /// Only remote registries really need to update. Local registries only - /// check that the index exists. - updated: bool, - /// Abstraction for interfacing to the different registry kinds. - ops: Box, - /// Interface for managing the on-disk index. - index: index::RegistryIndex<'cfg>, - /// A set of packages that should be allowed to be used, even if they are - /// yanked. - /// - /// This is populated from the entries in `Cargo.lock` to ensure that - /// `cargo update -p somepkg` won't unlock yanked entries in `Cargo.lock`. - /// Otherwise, the resolver would think that those entries no longer - /// exist, and it would trigger updates to unrelated packages. 
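The rule described just above has a simple shape: yanked versions are filtered out of query results unless the lock file already pinned them, i.e. unless they are in the whitelist. A toy version of that filter, with stand-in types (Cargo's real types are `PackageId` and `Summary`):

```rust
use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct PkgId(u64); // stand-in for PackageId

struct Candidate {
    id: PkgId,
    yanked: bool,
}

/// Drop yanked candidates unless the lock file already pinned them.
fn usable<'a>(candidates: &'a [Candidate], whitelist: &HashSet<PkgId>) -> Vec<&'a Candidate> {
    candidates
        .iter()
        .filter(|c| !c.yanked || whitelist.contains(&c.id))
        .collect()
}
```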
- yanked_whitelist: HashSet, -} - -/// The `config.json` file stored in the index. -#[derive(Deserialize)] -pub struct RegistryConfig { - /// Download endpoint for all crates. - /// - /// The string is a template which will generate the download URL for the - /// tarball of a specific version of a crate. The substrings `{crate}` and - /// `{version}` will be replaced with the crate's name and version - /// respectively. The substring `{prefix}` will be replaced with the - /// crate's prefix directory name, and the substring `{lowerprefix}` will - /// be replaced with the crate's prefix directory name converted to - /// lowercase. The substring `{sha256-checksum}` will be replaced with the - /// crate's sha256 checksum. - /// - /// For backwards compatibility, if the string does not contain any - /// markers (`{crate}`, `{version}`, `{prefix}`, or ``{lowerprefix}`), it - /// will be extended with `/{crate}/{version}/download` to - /// support registries like crates.io which were created before the - /// templating setup was created. - pub dl: String, - - /// API endpoint for the registry. This is what's actually hit to perform - /// operations like yanks, owner modifications, publish new crates, etc. - /// If this is None, the registry does not support API commands. - pub api: Option, -} - -/// The maximum version of the `v` field in the index this version of cargo -/// understands. -pub(crate) const INDEX_V_MAX: u32 = 2; - -/// A single line in the index representing a single version of a package. -#[derive(Deserialize)] -pub struct RegistryPackage<'a> { - name: InternedString, - vers: Version, - #[serde(borrow)] - deps: Vec>, - features: BTreeMap>, - /// This field contains features with new, extended syntax. Specifically, - /// namespaced features (`dep:`) and weak dependencies (`pkg?/feat`). - /// - /// This is separated from `features` because versions older than 1.19 - /// will fail to load due to not being able to parse the new syntax, even - /// with a `Cargo.lock` file. - features2: Option>>, - cksum: String, - /// If `true`, Cargo will skip this version when resolving. - /// - /// This was added in 2014. Everything in the crates.io index has this set - /// now, so this probably doesn't need to be an option anymore. - yanked: Option, - /// Native library name this package links to. - /// - /// Added early 2018 (see ), - /// can be `None` if published before then. - links: Option, - /// The schema version for this entry. - /// - /// If this is None, it defaults to version 1. Entries with unknown - /// versions are ignored. - /// - /// Version `2` format adds the `features2` field. - /// - /// This provides a method to safely introduce changes to index entries - /// and allow older versions of cargo to ignore newer entries it doesn't - /// understand. This is honored as of 1.51, so unfortunately older - /// versions will ignore it, and potentially misinterpret version 2 and - /// newer entries. - /// - /// The intent is that versions older than 1.51 will work with a - /// pre-existing `Cargo.lock`, but they may not correctly process `cargo - /// update` or build a lock from scratch. In that case, cargo may - /// incorrectly select a new package that uses a new index format. A - /// workaround is to downgrade any packages that are incompatible with the - /// `--precise` flag of `cargo update`. 
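The schema-versioning scheme described above — an optional `v` that defaults to 1, with entries newer than the understood maximum skipped after deserialization — can be mirrored with plain serde defaults. The struct and constant below are illustrative stand-ins, not the removed `RegistryPackage` definition:

```rust
use serde::Deserialize;

/// Highest entry schema version this sketch claims to understand.
const V_MAX: u32 = 2;

#[derive(Deserialize)]
struct MinimalEntry {
    name: String,
    vers: String,
    /// Missing in old index entries, so default to schema version 1.
    #[serde(default = "default_v")]
    v: u32,
}

fn default_v() -> u32 {
    1
}

/// Keep only entries whose schema version we know how to interpret.
fn supported(entries: Vec<MinimalEntry>) -> Vec<MinimalEntry> {
    entries.into_iter().filter(|e| e.v <= V_MAX).collect()
}
```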
- v: Option, -} - -#[test] -fn escaped_char_in_json() { - let _: RegistryPackage<'_> = serde_json::from_str( - r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{}}"#, - ) - .unwrap(); - let _: RegistryPackage<'_> = serde_json::from_str( - r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{"test":["k","q"]},"links":"a-sys"}"# - ).unwrap(); - - // Now we add escaped cher all the places they can go - // these are not valid, but it should error later than json parsing - let _: RegistryPackage<'_> = serde_json::from_str( - r#"{ - "name":"This name has a escaped cher in it \n\t\" ", - "vers":"0.0.1", - "deps":[{ - "name": " \n\t\" ", - "req": " \n\t\" ", - "features": [" \n\t\" "], - "optional": true, - "default_features": true, - "target": " \n\t\" ", - "kind": " \n\t\" ", - "registry": " \n\t\" " - }], - "cksum":"bae3", - "features":{"test \n\t\" ":["k \n\t\" ","q \n\t\" "]}, - "links":" \n\t\" "}"#, - ) - .unwrap(); -} - -/// A dependency as encoded in the index JSON. -#[derive(Deserialize)] -struct RegistryDependency<'a> { - name: InternedString, - #[serde(borrow)] - req: Cow<'a, str>, - features: Vec, - optional: bool, - default_features: bool, - target: Option>, - kind: Option>, - registry: Option>, - package: Option, - public: Option, -} - -impl<'a> RegistryDependency<'a> { - /// Converts an encoded dependency in the registry to a cargo dependency - pub fn into_dep(self, default: SourceId) -> CargoResult { - let RegistryDependency { - name, - req, - mut features, - optional, - default_features, - target, - kind, - registry, - package, - public, - } = self; - - let id = if let Some(registry) = ®istry { - SourceId::for_registry(®istry.into_url()?)? - } else { - default - }; - - let mut dep = Dependency::parse(package.unwrap_or(name), Some(&req), id)?; - if package.is_some() { - dep.set_explicit_name_in_toml(name); - } - let kind = match kind.as_deref().unwrap_or("") { - "dev" => DepKind::Development, - "build" => DepKind::Build, - _ => DepKind::Normal, - }; - - let platform = match target { - Some(target) => Some(target.parse()?), - None => None, - }; - - // All dependencies are private by default - let public = public.unwrap_or(false); - - // Unfortunately older versions of cargo and/or the registry ended up - // publishing lots of entries where the features array contained the - // empty feature, "", inside. This confuses the resolution process much - // later on and these features aren't actually valid, so filter them all - // out here. - features.retain(|s| !s.is_empty()); - - // In index, "registry" is null if it is from the same index. - // In Cargo.toml, "registry" is None if it is from the default - if !id.is_default_registry() { - dep.set_registry_id(id); - } - - dep.set_optional(optional) - .set_default_features(default_features) - .set_features(features) - .set_platform(platform) - .set_kind(kind) - .set_public(public); - - Ok(dep) - } -} - -/// An abstract interface to handle both a local (see `local::LocalRegistry`) -/// and remote (see `remote::RemoteRegistry`) registry. -/// -/// This allows [`RegistrySource`] to abstractly handle both registry kinds. -pub trait RegistryData { - /// Performs initialization for the registry. - /// - /// This should be safe to call multiple times, the implementation is - /// expected to not do any work if it is already prepared. - fn prepare(&self) -> CargoResult<()>; - - /// Returns the path to the index. 
- /// - /// Note that different registries store the index in different formats - /// (remote=git, local=files). - fn index_path(&self) -> &Filesystem; - - /// Loads the JSON for a specific named package from the index. - /// - /// * `root` is the root path to the index. - /// * `path` is the relative path to the package to load (like `ca/rg/cargo`). - /// * `data` is a callback that will receive the raw bytes of the index JSON file. - fn load( - &self, - root: &Path, - path: &Path, - data: &mut dyn FnMut(&[u8]) -> CargoResult<()>, - ) -> CargoResult<()>; - - /// Loads the `config.json` file and returns it. - /// - /// Local registries don't have a config, and return `None`. - fn config(&mut self) -> CargoResult>; - - /// Updates the index. - /// - /// For a remote registry, this updates the index over the network. Local - /// registries only check that the index exists. - fn update_index(&mut self) -> CargoResult<()>; - - /// Prepare to start downloading a `.crate` file. - /// - /// Despite the name, this doesn't actually download anything. If the - /// `.crate` is already downloaded, then it returns [`MaybeLock::Ready`]. - /// If it hasn't been downloaded, then it returns [`MaybeLock::Download`] - /// which contains the URL to download. The [`crate::core::package::Downloads`] - /// system handles the actual download process. After downloading, it - /// calls [`Self::finish_download`] to save the downloaded file. - /// - /// `checksum` is currently only used by local registries to verify the - /// file contents (because local registries never actually download - /// anything). Remote registries will validate the checksum in - /// `finish_download`. For already downloaded `.crate` files, it does not - /// validate the checksum, assuming the filesystem does not suffer from - /// corruption or manipulation. - fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult; - - /// Finish a download by saving a `.crate` file to disk. - /// - /// After [`crate::core::package::Downloads`] has finished a download, - /// it will call this to save the `.crate` file. This is only relevant - /// for remote registries. This should validate the checksum and save - /// the given data to the on-disk cache. - /// - /// Returns a [`File`] handle to the `.crate` file, positioned at the start. - fn finish_download(&mut self, pkg: PackageId, checksum: &str, data: &[u8]) - -> CargoResult; - - /// Returns whether or not the `.crate` file is already downloaded. - fn is_crate_downloaded(&self, _pkg: PackageId) -> bool { - true - } - - /// Validates that the global package cache lock is held. - /// - /// Given the [`Filesystem`], this will make sure that the package cache - /// lock is held. If not, it will panic. See - /// [`Config::acquire_package_cache_lock`] for acquiring the global lock. - /// - /// Returns the [`Path`] to the [`Filesystem`]. - fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path; - - /// Returns the current "version" of the index. - /// - /// For local registries, this returns `None` because there is no - /// versioning. For remote registries, this returns the SHA hash of the - /// git index on disk (or None if the index hasn't been downloaded yet). - /// - /// This is used by index caching to check if the cache is out of date. - fn current_version(&self) -> Option; -} - -/// The status of [`RegistryData::download`] which indicates if a `.crate` -/// file has already been downloaded, or if not then the URL to download. 
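The two-state result described above drives the whole download flow: either the `.crate` file is already cached and can be used immediately, or the caller receives a URL to hand to the download machinery before `finish_download` runs. A stand-in enum and a typical caller, purely to make the control flow concrete (these names are invented):

```rust
use std::fs::File;

enum FetchState {
    /// Already in the on-disk cache; ready to unpack.
    Ready(File),
    /// Not cached yet; the caller must download it from `url` first.
    Download { url: String, descriptor: String },
}

fn handle(state: FetchState) {
    match state {
        FetchState::Ready(_file) => {
            // Unpack and hand the sources to the build straight away.
        }
        FetchState::Download { url, descriptor } => {
            println!("queueing download of {descriptor} from {url}");
        }
    }
}
```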
-pub enum MaybeLock { - /// The `.crate` file is already downloaded. [`File`] is a handle to the - /// opened `.crate` file on the filesystem. - Ready(File), - /// The `.crate` file is not downloaded, here's the URL to download it from. - /// - /// `descriptor` is just a text string to display to the user of what is - /// being downloaded. - Download { url: String, descriptor: String }, -} - -mod index; -mod local; -mod remote; - -fn short_name(id: SourceId) -> String { - let hash = hex::short_hash(&id); - let ident = id.url().host_str().unwrap_or("").to_string(); - format!("{}-{}", ident, hash) -} - -impl<'cfg> RegistrySource<'cfg> { - pub fn remote( - source_id: SourceId, - yanked_whitelist: &HashSet, - config: &'cfg Config, - ) -> RegistrySource<'cfg> { - let name = short_name(source_id); - let ops = remote::RemoteRegistry::new(source_id, config, &name); - RegistrySource::new(source_id, config, &name, Box::new(ops), yanked_whitelist) - } - - pub fn local( - source_id: SourceId, - path: &Path, - yanked_whitelist: &HashSet, - config: &'cfg Config, - ) -> RegistrySource<'cfg> { - let name = short_name(source_id); - let ops = local::LocalRegistry::new(path, config, &name); - RegistrySource::new(source_id, config, &name, Box::new(ops), yanked_whitelist) - } - - fn new( - source_id: SourceId, - config: &'cfg Config, - name: &str, - ops: Box, - yanked_whitelist: &HashSet, - ) -> RegistrySource<'cfg> { - RegistrySource { - src_path: config.registry_source_path().join(name), - config, - source_id, - updated: false, - index: index::RegistryIndex::new(source_id, ops.index_path(), config), - yanked_whitelist: yanked_whitelist.clone(), - ops, - } - } - - /// Decode the configuration stored within the registry. - /// - /// This requires that the index has been at least checked out. - pub fn config(&mut self) -> CargoResult> { - self.ops.config() - } - - /// Unpacks a downloaded package into a location where it's ready to be - /// compiled. - /// - /// No action is taken if the source looks like it's already unpacked. - fn unpack_package(&self, pkg: PackageId, tarball: &File) -> CargoResult { - // The `.cargo-ok` file is used to track if the source is already - // unpacked. - let package_dir = format!("{}-{}", pkg.name(), pkg.version()); - let dst = self.src_path.join(&package_dir); - dst.create_dir()?; - let path = dst.join(PACKAGE_SOURCE_LOCK); - let path = self.config.assert_package_cache_locked(&path); - let unpack_dir = path.parent().unwrap(); - if let Ok(meta) = path.metadata() { - if meta.len() > 0 { - return Ok(unpack_dir.to_path_buf()); - } - } - let gz = GzDecoder::new(tarball); - let mut tar = Archive::new(gz); - let prefix = unpack_dir.file_name().unwrap(); - let parent = unpack_dir.parent().unwrap(); - for entry in tar.entries()? { - let mut entry = entry.with_context(|| "failed to iterate over archive")?; - let entry_path = entry - .path() - .with_context(|| "failed to read entry path")? - .into_owned(); - - // We're going to unpack this tarball into the global source - // directory, but we want to make sure that it doesn't accidentally - // (or maliciously) overwrite source code from other crates. Cargo - // itself should never generate a tarball that hits this error, and - // crates.io should also block uploads with these sorts of tarballs, - // but be extra sure by adding a check here as well. 
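The guard motivated by the comment above is the usual archive-extraction sanity check: every entry must stay under the expected `<name>-<version>/` prefix of the unpack directory. In isolation it looks roughly like this sketch (`tar`'s `unpack_in`, used alongside it, additionally refuses entries that escape the destination):

```rust
use std::path::{Component, Path};

/// Accept only archive entries that stay under the expected package prefix
/// and contain no parent-directory components.
fn entry_is_safe(entry_path: &Path, prefix: &Path) -> bool {
    entry_path.starts_with(prefix)
        && !entry_path
            .components()
            .any(|c| matches!(c, Component::ParentDir))
}
```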
- if !entry_path.starts_with(prefix) { - anyhow::bail!( - "invalid tarball downloaded, contains \ - a file at {:?} which isn't under {:?}", - entry_path, - prefix - ) - } - // Unpacking failed - let mut result = entry.unpack_in(parent).map_err(anyhow::Error::from); - if cfg!(windows) && restricted_names::is_windows_reserved_path(&entry_path) { - result = result.with_context(|| { - format!( - "`{}` appears to contain a reserved Windows path, \ - it cannot be extracted on Windows", - entry_path.display() - ) - }); - } - result - .with_context(|| format!("failed to unpack entry at `{}`", entry_path.display()))?; - } - - // The lock file is created after unpacking so we overwrite a lock file - // which may have been extracted from the package. - let mut ok = OpenOptions::new() - .create(true) - .read(true) - .write(true) - .open(&path) - .with_context(|| format!("failed to open `{}`", path.display()))?; - - // Write to the lock file to indicate that unpacking was successful. - write!(ok, "ok")?; - - Ok(unpack_dir.to_path_buf()) - } - - fn do_update(&mut self) -> CargoResult<()> { - self.ops.update_index()?; - let path = self.ops.index_path(); - self.index = index::RegistryIndex::new(self.source_id, path, self.config); - self.updated = true; - Ok(()) - } - - fn get_pkg(&mut self, package: PackageId, path: &File) -> CargoResult { - let path = self - .unpack_package(package, path) - .with_context(|| format!("failed to unpack package `{}`", package))?; - let mut src = PathSource::new(&path, self.source_id, self.config); - src.update()?; - let mut pkg = match src.download(package)? { - MaybePackage::Ready(pkg) => pkg, - MaybePackage::Download { .. } => unreachable!(), - }; - - // After we've loaded the package configure its summary's `checksum` - // field with the checksum we know for this `PackageId`. - let req = OptVersionReq::exact(package.version()); - let summary_with_cksum = self - .index - .summaries(package.name(), &req, &mut *self.ops)? - .map(|s| s.summary.clone()) - .next() - .expect("summary not found"); - if let Some(cksum) = summary_with_cksum.checksum() { - pkg.manifest_mut() - .summary_mut() - .set_checksum(cksum.to_string()); - } - - Ok(pkg) - } -} - -impl<'cfg> Source for RegistrySource<'cfg> { - fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - // If this is a precise dependency, then it came from a lock file and in - // theory the registry is known to contain this version. If, however, we - // come back with no summaries, then our registry may need to be - // updated, so we fall back to performing a lazy update. 
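// A standalone sketch of the tarball-entry check performed above: every entry
// unpacked from a `.crate` file must stay under the `<name>-<version>/` prefix
// and must not smuggle in `..` or absolute components. Uses only std; the
// helper name is illustrative.
use std::path::{Component, Path};

fn entry_is_safe(entry: &Path, expected_prefix: &str) -> bool {
    entry.starts_with(expected_prefix)
        && entry.components().all(|c| matches!(c, Component::Normal(_)))
}

fn main() {
    assert!(entry_is_safe(Path::new("foo-1.0.0/src/lib.rs"), "foo-1.0.0"));
    assert!(!entry_is_safe(Path::new("../etc/passwd"), "foo-1.0.0"));
    assert!(!entry_is_safe(Path::new("foo-1.0.0/../other/file"), "foo-1.0.0"));
}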
- if dep.source_id().precise().is_some() && !self.updated { - debug!("attempting query without update"); - let mut called = false; - self.index - .query_inner(dep, &mut *self.ops, &self.yanked_whitelist, &mut |s| { - if dep.matches(&s) { - called = true; - f(s); - } - })?; - if called { - return Ok(()); - } else { - debug!("falling back to an update"); - self.do_update()?; - } - } - - self.index - .query_inner(dep, &mut *self.ops, &self.yanked_whitelist, &mut |s| { - if dep.matches(&s) { - f(s); - } - }) - } - - fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - self.index - .query_inner(dep, &mut *self.ops, &self.yanked_whitelist, f) - } - - fn supports_checksums(&self) -> bool { - true - } - - fn requires_precise(&self) -> bool { - false - } - - fn source_id(&self) -> SourceId { - self.source_id - } - - fn update(&mut self) -> CargoResult<()> { - // If we have an imprecise version then we don't know what we're going - // to look for, so we always attempt to perform an update here. - // - // If we have a precise version, then we'll update lazily during the - // querying phase. Note that precise in this case is only - // `Some("locked")` as other `Some` values indicate a `cargo update - // --precise` request - if self.source_id.precise() != Some("locked") { - self.do_update()?; - } else { - debug!("skipping update due to locked registry"); - } - Ok(()) - } - - fn download(&mut self, package: PackageId) -> CargoResult { - let hash = self.index.hash(package, &mut *self.ops)?; - match self.ops.download(package, hash)? { - MaybeLock::Ready(file) => self.get_pkg(package, &file).map(MaybePackage::Ready), - MaybeLock::Download { url, descriptor } => { - Ok(MaybePackage::Download { url, descriptor }) - } - } - } - - fn finish_download(&mut self, package: PackageId, data: Vec) -> CargoResult { - let hash = self.index.hash(package, &mut *self.ops)?; - let file = self.ops.finish_download(package, hash, &data)?; - self.get_pkg(package, &file) - } - - fn fingerprint(&self, pkg: &Package) -> CargoResult { - Ok(pkg.package_id().version().to_string()) - } - - fn describe(&self) -> String { - self.source_id.display_index() - } - - fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) { - self.yanked_whitelist.extend(pkgs); - } - - fn is_yanked(&mut self, pkg: PackageId) -> CargoResult { - if !self.updated { - self.do_update()?; - } - self.index.is_yanked(pkg, &mut *self.ops) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/remote.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/remote.rs deleted file mode 100644 index f3bc0edb5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/registry/remote.rs +++ /dev/null @@ -1,339 +0,0 @@ -use crate::core::{GitReference, PackageId, SourceId}; -use crate::sources::git; -use crate::sources::registry::MaybeLock; -use crate::sources::registry::{ - RegistryConfig, RegistryData, CHECKSUM_TEMPLATE, CRATE_TEMPLATE, LOWER_PREFIX_TEMPLATE, - PREFIX_TEMPLATE, VERSION_TEMPLATE, -}; -use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; -use crate::util::{Config, Filesystem}; -use anyhow::Context as _; -use cargo_util::{paths, registry::make_dep_path, Sha256}; -use lazycell::LazyCell; -use log::{debug, trace}; -use std::cell::{Cell, Ref, RefCell}; -use std::fmt::Write as FmtWrite; -use std::fs::{self, File, OpenOptions}; -use std::io::prelude::*; -use std::io::SeekFrom; -use std::mem; -use std::path::Path; -use 
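// A toy sketch of the "query without update, then fall back" flow above: a
// locked dependency is first looked up in whatever index is already on disk,
// and only on a miss is the (expensive) index update performed and the query
// retried. An in-memory `Vec` stands in for the git index.
struct Index {
    versions: Vec<&'static str>,
    updated: bool,
}

impl Index {
    fn query(&mut self, wanted: &str) -> Option<&'static str> {
        if let Some(v) = self.versions.iter().copied().find(|v| *v == wanted) {
            return Some(v); // answered from the cached index
        }
        if !self.updated {
            self.update();
            return self.versions.iter().copied().find(|v| *v == wanted);
        }
        None
    }

    fn update(&mut self) {
        // Stand-in for fetching the remote index.
        self.versions.push("1.2.0");
        self.updated = true;
    }
}

fn main() {
    let mut index = Index { versions: vec!["1.0.0", "1.1.0"], updated: false };
    assert_eq!(index.query("1.2.0"), Some("1.2.0"));
}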
std::str; - -/// A remote registry is a registry that lives at a remote URL (such as -/// crates.io). The git index is cloned locally, and `.crate` files are -/// downloaded as needed and cached locally. -pub struct RemoteRegistry<'cfg> { - index_path: Filesystem, - /// Path to the cache of `.crate` files (`$CARGO_HOME/registry/path/$REG-HASH`). - cache_path: Filesystem, - source_id: SourceId, - index_git_ref: GitReference, - config: &'cfg Config, - tree: RefCell>>, - repo: LazyCell, - head: Cell>, - current_sha: Cell>, -} - -impl<'cfg> RemoteRegistry<'cfg> { - pub fn new(source_id: SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> { - RemoteRegistry { - index_path: config.registry_index_path().join(name), - cache_path: config.registry_cache_path().join(name), - source_id, - config, - // TODO: we should probably make this configurable - index_git_ref: GitReference::DefaultBranch, - tree: RefCell::new(None), - repo: LazyCell::new(), - head: Cell::new(None), - current_sha: Cell::new(None), - } - } - - fn repo(&self) -> CargoResult<&git2::Repository> { - self.repo.try_borrow_with(|| { - let path = self.config.assert_package_cache_locked(&self.index_path); - - // Fast path without a lock - if let Ok(repo) = git2::Repository::open(&path) { - trace!("opened a repo without a lock"); - return Ok(repo); - } - - // Ok, now we need to lock and try the whole thing over again. - trace!("acquiring registry index lock"); - match git2::Repository::open(&path) { - Ok(repo) => Ok(repo), - Err(_) => { - drop(paths::remove_dir_all(&path)); - paths::create_dir_all(&path)?; - - // Note that we'd actually prefer to use a bare repository - // here as we're not actually going to check anything out. - // All versions of Cargo, though, share the same CARGO_HOME, - // so for compatibility with older Cargo which *does* do - // checkouts we make sure to initialize a new full - // repository (not a bare one). - // - // We should change this to `init_bare` whenever we feel - // like enough time has passed or if we change the directory - // that the folder is located in, such as by changing the - // hash at the end of the directory. - // - // Note that in the meantime we also skip `init.templatedir` - // as it can be misconfigured sometimes or otherwise add - // things that we don't want. - let mut opts = git2::RepositoryInitOptions::new(); - opts.external_template(false); - Ok(git2::Repository::init_opts(&path, &opts).with_context(|| { - format!("failed to initialize index git repository (in {:?})", path) - })?) - } - } - }) - } - - fn head(&self) -> CargoResult { - if self.head.get().is_none() { - let repo = self.repo()?; - let oid = self.index_git_ref.resolve(repo)?; - self.head.set(Some(oid)); - } - Ok(self.head.get().unwrap()) - } - - fn tree(&self) -> CargoResult>> { - { - let tree = self.tree.borrow(); - if tree.is_some() { - return Ok(Ref::map(tree, |s| s.as_ref().unwrap())); - } - } - let repo = self.repo()?; - let commit = repo.find_commit(self.head()?)?; - let tree = commit.tree()?; - - // Unfortunately in libgit2 the tree objects look like they've got a - // reference to the repository object which means that a tree cannot - // outlive the repository that it came from. Here we want to cache this - // tree, though, so to accomplish this we transmute it to a static - // lifetime. - // - // Note that we don't actually hand out the static lifetime, instead we - // only return a scoped one from this function. 
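// The `head`/`current_sha` fields above memoize an expensive lookup behind
// `&self` using `Cell<Option<_>>`. A minimal sketch of that pattern, with a
// plain `u64` standing in for the git object id:
use std::cell::Cell;

struct Registry {
    head: Cell<Option<u64>>,
}

impl Registry {
    fn resolve_head(&self) -> u64 {
        // Stand-in for resolving the index git reference.
        42
    }

    fn head(&self) -> u64 {
        if let Some(oid) = self.head.get() {
            return oid; // cached
        }
        let oid = self.resolve_head();
        self.head.set(Some(oid));
        oid
    }
}

fn main() {
    let r = Registry { head: Cell::new(None) };
    assert_eq!(r.head(), 42);
    assert_eq!(r.head(), 42); // second call hits the cache
}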
Additionally the repo - // we loaded from (above) lives as long as this object - // (`RemoteRegistry`) so we then just need to ensure that the tree is - // destroyed first in the destructor, hence the destructor on - // `RemoteRegistry` below. - let tree = unsafe { mem::transmute::, git2::Tree<'static>>(tree) }; - *self.tree.borrow_mut() = Some(tree); - Ok(Ref::map(self.tree.borrow(), |s| s.as_ref().unwrap())) - } - - fn filename(&self, pkg: PackageId) -> String { - format!("{}-{}.crate", pkg.name(), pkg.version()) - } -} - -const LAST_UPDATED_FILE: &str = ".last-updated"; - -impl<'cfg> RegistryData for RemoteRegistry<'cfg> { - fn prepare(&self) -> CargoResult<()> { - self.repo()?; // create intermediate dirs and initialize the repo - Ok(()) - } - - fn index_path(&self) -> &Filesystem { - &self.index_path - } - - fn assert_index_locked<'a>(&self, path: &'a Filesystem) -> &'a Path { - self.config.assert_package_cache_locked(path) - } - - fn current_version(&self) -> Option { - if let Some(sha) = self.current_sha.get() { - return Some(sha); - } - let sha = InternedString::new(&self.head().ok()?.to_string()); - self.current_sha.set(Some(sha)); - Some(sha) - } - - fn load( - &self, - _root: &Path, - path: &Path, - data: &mut dyn FnMut(&[u8]) -> CargoResult<()>, - ) -> CargoResult<()> { - // Note that the index calls this method and the filesystem is locked - // in the index, so we don't need to worry about an `update_index` - // happening in a different process. - let repo = self.repo()?; - let tree = self.tree()?; - let entry = tree.get_path(path)?; - let object = entry.to_object(repo)?; - let blob = match object.as_blob() { - Some(blob) => blob, - None => anyhow::bail!("path `{}` is not a blob in the git repo", path.display()), - }; - data(blob.content()) - } - - fn config(&mut self) -> CargoResult> { - debug!("loading config"); - self.prepare()?; - self.config.assert_package_cache_locked(&self.index_path); - let mut config = None; - self.load(Path::new(""), Path::new("config.json"), &mut |json| { - config = Some(serde_json::from_slice(json)?); - Ok(()) - })?; - trace!("config loaded"); - Ok(config) - } - - fn update_index(&mut self) -> CargoResult<()> { - if self.config.offline() { - return Ok(()); - } - if self.config.cli_unstable().no_index_update { - return Ok(()); - } - // Make sure the index is only updated once per session since it is an - // expensive operation. This generally only happens when the resolver - // is run multiple times, such as during `cargo publish`. - if self.config.updated_sources().contains(&self.source_id) { - return Ok(()); - } - - debug!("updating the index"); - - // Ensure that we'll actually be able to acquire an HTTP handle later on - // once we start trying to download crates. This will weed out any - // problems with `.cargo/config` configuration related to HTTP. - // - // This way if there's a problem the error gets printed before we even - // hit the index, which may not actually read this configuration. - self.config.http()?; - - self.prepare()?; - self.head.set(None); - *self.tree.borrow_mut() = None; - self.current_sha.set(None); - let path = self.config.assert_package_cache_locked(&self.index_path); - self.config - .shell() - .status("Updating", self.source_id.display_index())?; - - // Fetch the latest version of our `index_git_ref` into the index - // checkout. 
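// The `config()` method above reads the registry's `config.json` through the
// same `load` callback used for index files and decodes it with `serde_json`.
// A trimmed sketch of that shape, assuming `serde` (with derive) and
// `serde_json` as dependencies; the struct fields are illustrative, not
// cargo's full `RegistryConfig`.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct RegistryConfig {
    dl: String,
    api: Option<String>,
}

fn load(_path: &str, data: &mut dyn FnMut(&[u8]) -> Result<(), serde_json::Error>) -> Result<(), serde_json::Error> {
    // Stand-in for looking the blob up in the checked-out git index.
    data(br#"{ "dl": "https://crates.io/api/v1/crates", "api": "https://crates.io" }"#)
}

fn main() -> Result<(), serde_json::Error> {
    let mut config = None;
    load("config.json", &mut |json| {
        config = Some(serde_json::from_slice::<RegistryConfig>(json)?);
        Ok(())
    })?;
    println!("{:?}", config);
    Ok(())
}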
- let url = self.source_id.url(); - let repo = self.repo.borrow_mut().unwrap(); - git::fetch(repo, url.as_str(), &self.index_git_ref, self.config) - .with_context(|| format!("failed to fetch `{}`", url))?; - self.config.updated_sources().insert(self.source_id); - - // Create a dummy file to record the mtime for when we updated the - // index. - paths::create(&path.join(LAST_UPDATED_FILE))?; - - Ok(()) - } - - fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult { - let filename = self.filename(pkg); - - // Attempt to open an read-only copy first to avoid an exclusive write - // lock and also work with read-only filesystems. Note that we check the - // length of the file like below to handle interrupted downloads. - // - // If this fails then we fall through to the exclusive path where we may - // have to redownload the file. - let path = self.cache_path.join(&filename); - let path = self.config.assert_package_cache_locked(&path); - if let Ok(dst) = File::open(&path) { - let meta = dst.metadata()?; - if meta.len() > 0 { - return Ok(MaybeLock::Ready(dst)); - } - } - - let config = self.config()?.unwrap(); - let mut url = config.dl; - if !url.contains(CRATE_TEMPLATE) - && !url.contains(VERSION_TEMPLATE) - && !url.contains(PREFIX_TEMPLATE) - && !url.contains(LOWER_PREFIX_TEMPLATE) - && !url.contains(CHECKSUM_TEMPLATE) - { - write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap(); - } - let prefix = make_dep_path(&*pkg.name(), true); - let url = url - .replace(CRATE_TEMPLATE, &*pkg.name()) - .replace(VERSION_TEMPLATE, &pkg.version().to_string()) - .replace(PREFIX_TEMPLATE, &prefix) - .replace(LOWER_PREFIX_TEMPLATE, &prefix.to_lowercase()) - .replace(CHECKSUM_TEMPLATE, checksum); - - Ok(MaybeLock::Download { - url, - descriptor: pkg.to_string(), - }) - } - - fn finish_download( - &mut self, - pkg: PackageId, - checksum: &str, - data: &[u8], - ) -> CargoResult { - // Verify what we just downloaded - let actual = Sha256::new().update(data).finish_hex(); - if actual != checksum { - anyhow::bail!("failed to verify the checksum of `{}`", pkg) - } - - let filename = self.filename(pkg); - self.cache_path.create_dir()?; - let path = self.cache_path.join(&filename); - let path = self.config.assert_package_cache_locked(&path); - let mut dst = OpenOptions::new() - .create(true) - .read(true) - .write(true) - .open(&path) - .with_context(|| format!("failed to open `{}`", path.display()))?; - let meta = dst.metadata()?; - if meta.len() > 0 { - return Ok(dst); - } - - dst.write_all(data)?; - dst.seek(SeekFrom::Start(0))?; - Ok(dst) - } - - fn is_crate_downloaded(&self, pkg: PackageId) -> bool { - let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); - let path = Path::new(&filename); - - let path = self.cache_path.join(path); - let path = self.config.assert_package_cache_locked(&path); - if let Ok(meta) = fs::metadata(path) { - return meta.len() > 0; - } - false - } -} - -impl<'cfg> Drop for RemoteRegistry<'cfg> { - fn drop(&mut self) { - // Just be sure to drop this before our other fields - self.tree.borrow_mut().take(); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/replaced.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/replaced.rs deleted file mode 100644 index 468df095c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/sources/replaced.rs +++ /dev/null @@ -1,130 +0,0 @@ -use crate::core::source::MaybePackage; -use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary}; 
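// A std-only sketch of the download-URL construction above: the registry's
// `dl` value may embed placeholders; if it contains none, the default
// `/{crate}/{version}/download` suffix is appended before substitution.
// `prefix` is the `se/rd`-style path segment derived from the crate name.
fn download_url(dl: &str, name: &str, version: &str, prefix: &str, checksum: &str) -> String {
    const MARKERS: [&str; 5] = ["{crate}", "{version}", "{prefix}", "{lowerprefix}", "{sha256-checksum}"];
    let mut url = dl.to_string();
    if !MARKERS.iter().any(|m| url.contains(m)) {
        url.push_str("/{crate}/{version}/download");
    }
    url.replace("{crate}", name)
        .replace("{version}", version)
        .replace("{prefix}", prefix)
        .replace("{lowerprefix}", &prefix.to_lowercase())
        .replace("{sha256-checksum}", checksum)
}

fn main() {
    let url = download_url("https://crates.io/api/v1/crates", "serde", "1.0.136", "se/rd", "abc123");
    assert_eq!(url, "https://crates.io/api/v1/crates/serde/1.0.136/download");
}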
-use crate::util::errors::CargoResult; - -use anyhow::Context as _; - -pub struct ReplacedSource<'cfg> { - to_replace: SourceId, - replace_with: SourceId, - inner: Box, -} - -impl<'cfg> ReplacedSource<'cfg> { - pub fn new( - to_replace: SourceId, - replace_with: SourceId, - src: Box, - ) -> ReplacedSource<'cfg> { - ReplacedSource { - to_replace, - replace_with, - inner: src, - } - } -} - -impl<'cfg> Source for ReplacedSource<'cfg> { - fn source_id(&self) -> SourceId { - self.to_replace - } - - fn replaced_source_id(&self) -> SourceId { - self.replace_with - } - - fn supports_checksums(&self) -> bool { - self.inner.supports_checksums() - } - - fn requires_precise(&self) -> bool { - self.inner.requires_precise() - } - - fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - let (replace_with, to_replace) = (self.replace_with, self.to_replace); - let dep = dep.clone().map_source(to_replace, replace_with); - - self.inner - .query(&dep, &mut |summary| { - f(summary.map_source(replace_with, to_replace)) - }) - .with_context(|| format!("failed to query replaced source {}", self.to_replace))?; - Ok(()) - } - - fn fuzzy_query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> { - let (replace_with, to_replace) = (self.replace_with, self.to_replace); - let dep = dep.clone().map_source(to_replace, replace_with); - - self.inner - .fuzzy_query(&dep, &mut |summary| { - f(summary.map_source(replace_with, to_replace)) - }) - .with_context(|| format!("failed to query replaced source {}", self.to_replace))?; - Ok(()) - } - - fn update(&mut self) -> CargoResult<()> { - self.inner - .update() - .with_context(|| format!("failed to update replaced source {}", self.to_replace))?; - Ok(()) - } - - fn download(&mut self, id: PackageId) -> CargoResult { - let id = id.with_source_id(self.replace_with); - let pkg = self - .inner - .download(id) - .with_context(|| format!("failed to download replaced source {}", self.to_replace))?; - Ok(match pkg { - MaybePackage::Ready(pkg) => { - MaybePackage::Ready(pkg.map_source(self.replace_with, self.to_replace)) - } - other @ MaybePackage::Download { .. 
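// A toy illustration of the source-replacement pattern that `ReplacedSource`
// implements above: requests are rewritten to the replacement source on the
// way in, and results are mapped back to the original source id on the way
// out, so the rest of the resolver never notices the swap. Plain strings
// stand in for `SourceId` and `Summary`.
trait Source {
    fn query(&self, dep: &str, f: &mut dyn FnMut(String));
}

struct Mirror;
impl Source for Mirror {
    fn query(&self, dep: &str, f: &mut dyn FnMut(String)) {
        f(format!("{dep} from vendored-mirror"));
    }
}

struct ReplacedSource<S: Source> {
    to_replace: &'static str,
    inner: S,
}

impl<S: Source> Source for ReplacedSource<S> {
    fn query(&self, dep: &str, f: &mut dyn FnMut(String)) {
        self.inner.query(dep, &mut |summary| {
            // Map results back so callers still see `to_replace`.
            f(summary.replace("vendored-mirror", self.to_replace))
        });
    }
}

fn main() {
    let src = ReplacedSource { to_replace: "crates-io", inner: Mirror };
    src.query("serde", &mut |s| println!("{s}")); // "serde from crates-io"
}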
} => other, - }) - } - - fn finish_download(&mut self, id: PackageId, data: Vec) -> CargoResult { - let id = id.with_source_id(self.replace_with); - let pkg = self - .inner - .finish_download(id, data) - .with_context(|| format!("failed to download replaced source {}", self.to_replace))?; - Ok(pkg.map_source(self.replace_with, self.to_replace)) - } - - fn fingerprint(&self, id: &Package) -> CargoResult { - self.inner.fingerprint(id) - } - - fn verify(&self, id: PackageId) -> CargoResult<()> { - let id = id.with_source_id(self.replace_with); - self.inner.verify(id) - } - - fn describe(&self) -> String { - format!( - "{} (which is replacing {})", - self.inner.describe(), - self.to_replace - ) - } - - fn is_replaced(&self) -> bool { - true - } - - fn add_to_yanked_whitelist(&mut self, pkgs: &[PackageId]) { - let pkgs = pkgs - .iter() - .map(|id| id.with_source_id(self.replace_with)) - .collect::>(); - self.inner.add_to_yanked_whitelist(&pkgs); - } - - fn is_yanked(&mut self, pkg: PackageId) -> CargoResult { - self.inner.is_yanked(pkg) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/canonical_url.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/canonical_url.rs deleted file mode 100644 index 7516e0356..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/canonical_url.rs +++ /dev/null @@ -1,75 +0,0 @@ -use crate::util::errors::CargoResult; -use std::hash::{self, Hash}; -use url::Url; - -/// A newtype wrapper around `Url` which represents a "canonical" version of an -/// original URL. -/// -/// A "canonical" url is only intended for internal comparison purposes in -/// Cargo. It's to help paper over mistakes such as depending on -/// `github.com/foo/bar` vs `github.com/foo/bar.git`. This is **only** for -/// internal purposes within Cargo and provides no means to actually read the -/// underlying string value of the `Url` it contains. This is intentional, -/// because all fetching should still happen within the context of the original -/// URL. -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] -pub struct CanonicalUrl(Url); - -impl CanonicalUrl { - pub fn new(url: &Url) -> CargoResult { - let mut url = url.clone(); - - // cannot-be-a-base-urls (e.g., `github.com:rust-lang/rustfmt.git`) - // are not supported. - if url.cannot_be_a_base() { - anyhow::bail!( - "invalid url `{}`: cannot-be-a-base-URLs are not supported", - url - ) - } - - // Strip a trailing slash. - if url.path().ends_with('/') { - url.path_segments_mut().unwrap().pop_if_empty(); - } - - // For GitHub URLs specifically, just lower-case everything. GitHub - // treats both the same, but they hash differently, and we're gonna be - // hashing them. This wants a more general solution, and also we're - // almost certainly not using the same case conversion rules that GitHub - // does. (See issue #84) - if url.host_str() == Some("github.com") { - url = format!("https{}", &url[url::Position::AfterScheme..]) - .parse() - .unwrap(); - let path = url.path().to_lowercase(); - url.set_path(&path); - } - - // Repos can generally be accessed with or without `.git` extension. 
- let needs_chopping = url.path().ends_with(".git"); - if needs_chopping { - let last = { - let last = url.path_segments().unwrap().next_back().unwrap(); - last[..last.len() - 4].to_owned() - }; - url.path_segments_mut().unwrap().pop().push(&last); - } - - Ok(CanonicalUrl(url)) - } - - /// Returns the raw canonicalized URL, although beware that this should - /// never be used/displayed/etc, it should only be used for internal data - /// structures and hashes and such. - pub fn raw_canonicalized_url(&self) -> &Url { - &self.0 - } -} - -// See comment in `source_id.rs` for why we explicitly use `as_str()` here. -impl Hash for CanonicalUrl { - fn hash(&self, into: &mut S) { - self.0.as_str().hash(into); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/command_prelude.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/command_prelude.rs deleted file mode 100644 index 07ea49fd2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/command_prelude.rs +++ /dev/null @@ -1,707 +0,0 @@ -use crate::core::compiler::{BuildConfig, MessageFormat}; -use crate::core::resolver::CliFeatures; -use crate::core::{Edition, Workspace}; -use crate::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl}; -use crate::sources::CRATES_IO_REGISTRY; -use crate::util::important_paths::find_root_manifest_for_wd; -use crate::util::interning::InternedString; -use crate::util::restricted_names::is_glob_pattern; -use crate::util::toml::{StringOrVec, TomlProfile}; -use crate::util::validate_package_name; -use crate::util::{ - print_available_benches, print_available_binaries, print_available_examples, - print_available_packages, print_available_tests, -}; -use crate::CargoResult; -use anyhow::bail; -use cargo_util::paths; -use clap::{self, SubCommand}; -use std::ffi::{OsStr, OsString}; -use std::path::PathBuf; - -pub use crate::core::compiler::CompileMode; -pub use crate::{CliError, CliResult, Config}; -pub use clap::{AppSettings, Arg, ArgMatches}; - -pub type App = clap::App<'static, 'static>; - -pub trait AppExt: Sized { - fn _arg(self, arg: Arg<'static, 'static>) -> Self; - - /// Do not use this method, it is only for backwards compatibility. - /// Use `arg_package_spec_no_all` instead. - fn arg_package_spec( - self, - package: &'static str, - all: &'static str, - exclude: &'static str, - ) -> Self { - self.arg_package_spec_no_all(package, all, exclude) - ._arg(opt("all", "Alias for --workspace (deprecated)")) - } - - /// Variant of arg_package_spec that does not include the `--all` flag - /// (but does include `--workspace`). Used to avoid confusion with - /// historical uses of `--all`. 
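// A string-level sketch of the canonicalization rules above (the real code
// operates on a parsed `url::Url`): drop a trailing `/`, lower-case GitHub
// URLs, and chop a trailing `.git` so `.../bar` and `.../bar.git` hash and
// compare as the same repository.
fn canonicalize(url: &str) -> String {
    let mut url = url.trim_end_matches('/').to_string();
    if url.contains("github.com") {
        url = url.to_lowercase();
    }
    if let Some(stripped) = url.strip_suffix(".git") {
        url = stripped.to_string();
    }
    url
}

fn main() {
    assert_eq!(
        canonicalize("https://github.com/Foo/Bar.git"),
        canonicalize("https://github.com/foo/bar/")
    );
}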
- fn arg_package_spec_no_all( - self, - package: &'static str, - all: &'static str, - exclude: &'static str, - ) -> Self { - self.arg_package_spec_simple(package) - ._arg(opt("workspace", all)) - ._arg(multi_opt("exclude", "SPEC", exclude)) - } - - fn arg_package_spec_simple(self, package: &'static str) -> Self { - self._arg(optional_multi_opt("package", "SPEC", package).short("p")) - } - - fn arg_package(self, package: &'static str) -> Self { - self._arg( - optional_opt("package", package) - .short("p") - .value_name("SPEC"), - ) - } - - fn arg_jobs(self) -> Self { - self._arg( - opt("jobs", "Number of parallel jobs, defaults to # of CPUs") - .short("j") - .value_name("N"), - ) - } - - fn arg_targets_all( - self, - lib: &'static str, - bin: &'static str, - bins: &'static str, - example: &'static str, - examples: &'static str, - test: &'static str, - tests: &'static str, - bench: &'static str, - benches: &'static str, - all: &'static str, - ) -> Self { - self.arg_targets_lib_bin_example(lib, bin, bins, example, examples) - ._arg(optional_multi_opt("test", "NAME", test)) - ._arg(opt("tests", tests)) - ._arg(optional_multi_opt("bench", "NAME", bench)) - ._arg(opt("benches", benches)) - ._arg(opt("all-targets", all)) - } - - fn arg_targets_lib_bin_example( - self, - lib: &'static str, - bin: &'static str, - bins: &'static str, - example: &'static str, - examples: &'static str, - ) -> Self { - self._arg(opt("lib", lib)) - ._arg(optional_multi_opt("bin", "NAME", bin)) - ._arg(opt("bins", bins)) - ._arg(optional_multi_opt("example", "NAME", example)) - ._arg(opt("examples", examples)) - } - - fn arg_targets_bins_examples( - self, - bin: &'static str, - bins: &'static str, - example: &'static str, - examples: &'static str, - ) -> Self { - self._arg(optional_multi_opt("bin", "NAME", bin)) - ._arg(opt("bins", bins)) - ._arg(optional_multi_opt("example", "NAME", example)) - ._arg(opt("examples", examples)) - } - - fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self { - self._arg(optional_multi_opt("bin", "NAME", bin)) - ._arg(optional_multi_opt("example", "NAME", example)) - } - - fn arg_features(self) -> Self { - self._arg(multi_opt( - "features", - "FEATURES", - "Space or comma separated list of features to activate", - )) - ._arg(opt("all-features", "Activate all available features")) - ._arg(opt( - "no-default-features", - "Do not activate the `default` feature", - )) - } - - fn arg_release(self, release: &'static str) -> Self { - self._arg(opt("release", release).short("r")) - } - - fn arg_profile(self, profile: &'static str) -> Self { - self._arg(opt("profile", profile).value_name("PROFILE-NAME")) - } - - fn arg_doc(self, doc: &'static str) -> Self { - self._arg(opt("doc", doc)) - } - - fn arg_target_triple(self, target: &'static str) -> Self { - self._arg(multi_opt("target", "TRIPLE", target)) - } - - fn arg_target_dir(self) -> Self { - self._arg( - opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY"), - ) - } - - fn arg_manifest_path(self) -> Self { - self._arg(opt("manifest-path", "Path to Cargo.toml").value_name("PATH")) - } - - fn arg_message_format(self) -> Self { - self._arg(multi_opt("message-format", "FMT", "Error format")) - } - - fn arg_build_plan(self) -> Self { - self._arg(opt( - "build-plan", - "Output the build plan in JSON (unstable)", - )) - } - - fn arg_unit_graph(self) -> Self { - self._arg(opt("unit-graph", "Output build graph in JSON (unstable)")) - } - - fn arg_new_opts(self) -> Self { - self._arg( - opt( - 
"vcs", - "Initialize a new repository for the given version \ - control system (git, hg, pijul, or fossil) or do not \ - initialize any version control at all (none), overriding \ - a global configuration.", - ) - .value_name("VCS") - .possible_values(&["git", "hg", "pijul", "fossil", "none"]), - ) - ._arg(opt("bin", "Use a binary (application) template [default]")) - ._arg(opt("lib", "Use a library template")) - ._arg( - opt("edition", "Edition to set for the crate generated") - .possible_values(Edition::CLI_VALUES) - .value_name("YEAR"), - ) - ._arg( - opt( - "name", - "Set the resulting package name, defaults to the directory name", - ) - .value_name("NAME"), - ) - } - - fn arg_index(self) -> Self { - self._arg(opt("index", "Registry index URL to upload the package to").value_name("INDEX")) - } - - fn arg_dry_run(self, dry_run: &'static str) -> Self { - self._arg(opt("dry-run", dry_run)) - } - - fn arg_ignore_rust_version(self) -> Self { - self._arg(opt( - "ignore-rust-version", - "Ignore `rust-version` specification in packages", - )) - } - - fn arg_future_incompat_report(self) -> Self { - self._arg(opt( - "future-incompat-report", - "Outputs a future incompatibility report at the end of the build", - )) - } - - fn arg_quiet(self) -> Self { - self._arg(opt("quiet", "Do not print cargo log messages").short("q")) - } -} - -impl AppExt for App { - fn _arg(self, arg: Arg<'static, 'static>) -> Self { - self.arg(arg) - } -} - -pub fn opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> { - Arg::with_name(name).long(name).help(help) -} - -pub fn optional_opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> { - opt(name, help).min_values(0) -} - -pub fn optional_multi_opt( - name: &'static str, - value_name: &'static str, - help: &'static str, -) -> Arg<'static, 'static> { - opt(name, help) - .value_name(value_name) - .multiple(true) - .min_values(0) - .number_of_values(1) -} - -pub fn multi_opt( - name: &'static str, - value_name: &'static str, - help: &'static str, -) -> Arg<'static, 'static> { - // Note that all `.multiple(true)` arguments in Cargo should specify - // `.number_of_values(1)` as well, so that `--foo val1 val2` is - // *not* parsed as `foo` with values ["val1", "val2"]. - // `number_of_values` should become the default in clap 3. - opt(name, help) - .value_name(value_name) - .multiple(true) - .number_of_values(1) -} - -pub fn subcommand(name: &'static str) -> App { - SubCommand::with_name(name).settings(&[ - AppSettings::UnifiedHelpMessage, - AppSettings::DeriveDisplayOrder, - AppSettings::DontCollapseArgsInUsage, - ]) -} - -/// Determines whether or not to gate `--profile` as unstable when resolving it. -pub enum ProfileChecking { - /// `cargo rustc` historically has allowed "test", "bench", and "check". This - /// variant explicitly allows those. - LegacyRustc, - /// `cargo check` and `cargo fix` historically has allowed "test". This variant - /// explicitly allows that on stable. - LegacyTestOnly, - /// All other commands, which allow any valid custom named profile. 
- Custom, -} - -pub trait ArgMatchesExt { - fn value_of_u32(&self, name: &str) -> CargoResult> { - let arg = match self._value_of(name) { - None => None, - Some(arg) => Some(arg.parse::().map_err(|_| { - clap::Error::value_validation_auto(format!("could not parse `{}` as a number", arg)) - })?), - }; - Ok(arg) - } - - /// Returns value of the `name` command-line argument as an absolute path - fn value_of_path(&self, name: &str, config: &Config) -> Option { - self._value_of(name).map(|path| config.cwd().join(path)) - } - - fn root_manifest(&self, config: &Config) -> CargoResult { - if let Some(path) = self.value_of_path("manifest-path", config) { - // In general, we try to avoid normalizing paths in Cargo, - // but in this particular case we need it to fix #3586. - let path = paths::normalize_path(&path); - if !path.ends_with("Cargo.toml") { - anyhow::bail!("the manifest-path must be a path to a Cargo.toml file") - } - if !path.exists() { - anyhow::bail!( - "manifest path `{}` does not exist", - self._value_of("manifest-path").unwrap() - ) - } - return Ok(path); - } - find_root_manifest_for_wd(config.cwd()) - } - - fn workspace<'a>(&self, config: &'a Config) -> CargoResult> { - let root = self.root_manifest(config)?; - let mut ws = Workspace::new(&root, config)?; - if config.cli_unstable().avoid_dev_deps { - ws.set_require_optional_deps(false); - } - Ok(ws) - } - - fn jobs(&self) -> CargoResult> { - self.value_of_u32("jobs") - } - - fn targets(&self) -> Vec { - self._values_of("target") - } - - fn get_profile_name( - &self, - config: &Config, - default: &str, - profile_checking: ProfileChecking, - ) -> CargoResult { - let specified_profile = self._value_of("profile"); - - // Check for allowed legacy names. - // This is an early exit, since it allows combination with `--release`. - match (specified_profile, profile_checking) { - // `cargo rustc` has legacy handling of these names - (Some(name @ ("dev" | "test" | "bench" | "check")), ProfileChecking::LegacyRustc) - // `cargo fix` and `cargo check` has legacy handling of this profile name - | (Some(name @ "test"), ProfileChecking::LegacyTestOnly) => { - if self._is_present("release") { - config.shell().warn( - "the `--release` flag should not be specified with the `--profile` flag\n\ - The `--release` flag will be ignored.\n\ - This was historically accepted, but will become an error \ - in a future release." - )?; - } - return Ok(InternedString::new(name)); - } - _ => {} - } - - let conflict = |flag: &str, equiv: &str, specified: &str| -> anyhow::Error { - anyhow::format_err!( - "conflicting usage of --profile={} and --{flag}\n\ - The `--{flag}` flag is the same as `--profile={equiv}`.\n\ - Remove one flag or the other to continue.", - specified, - flag = flag, - equiv = equiv - ) - }; - - let name = match ( - self._is_present("release"), - self._is_present("debug"), - specified_profile, - ) { - (false, false, None) => default, - (true, _, None | Some("release")) => "release", - (true, _, Some(name)) => return Err(conflict("release", "release", name)), - (_, true, None | Some("dev")) => "dev", - (_, true, Some(name)) => return Err(conflict("debug", "dev", name)), - // `doc` is separate from all the other reservations because - // [profile.doc] was historically allowed, but is deprecated and - // has no effect. To avoid potentially breaking projects, it is a - // warning in Cargo.toml, but since `--profile` is new, we can - // reject it completely here. 
- (_, _, Some("doc")) => { - bail!("profile `doc` is reserved and not allowed to be explicitly specified") - } - (_, _, Some(name)) => { - TomlProfile::validate_name(name)?; - name - } - }; - - Ok(InternedString::new(name)) - } - - fn packages_from_flags(&self) -> CargoResult { - Packages::from_flags( - // TODO Integrate into 'workspace' - self._is_present("workspace") || self._is_present("all"), - self._values_of("exclude"), - self._values_of("package"), - ) - } - - fn compile_options( - &self, - config: &Config, - mode: CompileMode, - workspace: Option<&Workspace<'_>>, - profile_checking: ProfileChecking, - ) -> CargoResult { - let spec = self.packages_from_flags()?; - let mut message_format = None; - let default_json = MessageFormat::Json { - short: false, - ansi: false, - render_diagnostics: false, - }; - for fmt in self._values_of("message-format") { - for fmt in fmt.split(',') { - let fmt = fmt.to_ascii_lowercase(); - match fmt.as_str() { - "json" => { - if message_format.is_some() { - bail!("cannot specify two kinds of `message-format` arguments"); - } - message_format = Some(default_json); - } - "human" => { - if message_format.is_some() { - bail!("cannot specify two kinds of `message-format` arguments"); - } - message_format = Some(MessageFormat::Human); - } - "short" => { - if message_format.is_some() { - bail!("cannot specify two kinds of `message-format` arguments"); - } - message_format = Some(MessageFormat::Short); - } - "json-render-diagnostics" => { - if message_format.is_none() { - message_format = Some(default_json); - } - match &mut message_format { - Some(MessageFormat::Json { - render_diagnostics, .. - }) => *render_diagnostics = true, - _ => bail!("cannot specify two kinds of `message-format` arguments"), - } - } - "json-diagnostic-short" => { - if message_format.is_none() { - message_format = Some(default_json); - } - match &mut message_format { - Some(MessageFormat::Json { short, .. }) => *short = true, - _ => bail!("cannot specify two kinds of `message-format` arguments"), - } - } - "json-diagnostic-rendered-ansi" => { - if message_format.is_none() { - message_format = Some(default_json); - } - match &mut message_format { - Some(MessageFormat::Json { ansi, .. 
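// A self-contained sketch of the flag matrix resolved above: `--release` and
// `--debug` are shorthands for `--profile=release` / `--profile=dev`, and
// conflicting or reserved combinations are rejected. Error texts are
// abbreviated here.
fn resolve_profile<'a>(
    release: bool,
    debug: bool,
    profile: Option<&'a str>,
    default: &'a str,
) -> Result<&'a str, String> {
    match (release, debug, profile) {
        (false, false, None) => Ok(default),
        (true, _, None | Some("release")) => Ok("release"),
        (true, _, Some(name)) => Err(format!("conflicting usage of --profile={name} and --release")),
        (_, true, None | Some("dev")) => Ok("dev"),
        (_, true, Some(name)) => Err(format!("conflicting usage of --profile={name} and --debug")),
        (_, _, Some("doc")) => Err("profile `doc` is reserved".to_string()),
        (_, _, Some(name)) => Ok(name),
    }
}

fn main() {
    assert_eq!(resolve_profile(false, false, None, "dev"), Ok("dev"));
    assert_eq!(resolve_profile(true, false, None, "dev"), Ok("release"));
    assert!(resolve_profile(true, false, Some("bench"), "dev").is_err());
}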
}) => *ansi = true, - _ => bail!("cannot specify two kinds of `message-format` arguments"), - } - } - s => bail!("invalid message format specifier: `{}`", s), - } - } - } - - let mut build_config = BuildConfig::new(config, self.jobs()?, &self.targets(), mode)?; - build_config.message_format = message_format.unwrap_or(MessageFormat::Human); - build_config.requested_profile = self.get_profile_name(config, "dev", profile_checking)?; - build_config.build_plan = self._is_present("build-plan"); - build_config.unit_graph = self._is_present("unit-graph"); - build_config.future_incompat_report = self._is_present("future-incompat-report"); - if build_config.build_plan { - config - .cli_unstable() - .fail_if_stable_opt("--build-plan", 5579)?; - }; - if build_config.unit_graph { - config - .cli_unstable() - .fail_if_stable_opt("--unit-graph", 8002)?; - } - - let opts = CompileOptions { - build_config, - cli_features: self.cli_features()?, - spec, - filter: CompileFilter::from_raw_arguments( - self._is_present("lib"), - self._values_of("bin"), - self._is_present("bins"), - self._values_of("test"), - self._is_present("tests"), - self._values_of("example"), - self._is_present("examples"), - self._values_of("bench"), - self._is_present("benches"), - self._is_present("all-targets"), - ), - target_rustdoc_args: None, - target_rustc_args: None, - target_rustc_crate_types: None, - local_rustdoc_args: None, - rustdoc_document_private_items: false, - honor_rust_version: !self._is_present("ignore-rust-version"), - }; - - if let Some(ws) = workspace { - self.check_optional_opts(ws, &opts)?; - } else if self.is_present_with_zero_values("package") { - // As for cargo 0.50.0, this won't occur but if someone sneaks in - // we can still provide this informative message for them. - anyhow::bail!( - "\"--package \" requires a SPEC format value, \ - which can be any package ID specifier in the dependency graph.\n\ - Run `cargo help pkgid` for more information about SPEC format." 
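// A trimmed sketch of the `--message-format` merging above: comma-separated
// specifiers are folded into a single value, the JSON modifiers only toggle
// flags on an existing JSON selection, and mixing kinds is an error. Only a
// subset of the specifiers handled above is shown.
#[derive(Debug, PartialEq)]
enum MessageFormat {
    Human,
    Short,
    Json { short: bool, ansi: bool, render_diagnostics: bool },
}

fn parse_message_format(specs: &[&str]) -> Result<MessageFormat, String> {
    const DEFAULT_JSON: MessageFormat =
        MessageFormat::Json { short: false, ansi: false, render_diagnostics: false };
    let mut format = None;
    for spec in specs.iter().flat_map(|s| s.split(',')) {
        match spec.to_ascii_lowercase().as_str() {
            "human" | "short" | "json" if format.is_some() => {
                return Err("cannot specify two kinds of `message-format` arguments".into());
            }
            "human" => format = Some(MessageFormat::Human),
            "short" => format = Some(MessageFormat::Short),
            "json" => format = Some(DEFAULT_JSON),
            "json-render-diagnostics" => match format.get_or_insert(DEFAULT_JSON) {
                MessageFormat::Json { render_diagnostics, .. } => *render_diagnostics = true,
                _ => return Err("cannot specify two kinds of `message-format` arguments".into()),
            },
            other => return Err(format!("invalid message format specifier: `{other}`")),
        }
    }
    Ok(format.unwrap_or(MessageFormat::Human))
}

fn main() {
    assert_eq!(
        parse_message_format(&["json,json-render-diagnostics"]),
        Ok(MessageFormat::Json { short: false, ansi: false, render_diagnostics: true })
    );
}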
- ) - } - - Ok(opts) - } - - fn cli_features(&self) -> CargoResult { - CliFeatures::from_command_line( - &self._values_of("features"), - self._is_present("all-features"), - !self._is_present("no-default-features"), - ) - } - - fn compile_options_for_single_package( - &self, - config: &Config, - mode: CompileMode, - workspace: Option<&Workspace<'_>>, - profile_checking: ProfileChecking, - ) -> CargoResult { - let mut compile_opts = self.compile_options(config, mode, workspace, profile_checking)?; - let spec = self._values_of("package"); - if spec.iter().any(is_glob_pattern) { - anyhow::bail!("Glob patterns on package selection are not supported.") - } - compile_opts.spec = Packages::Packages(spec); - Ok(compile_opts) - } - - fn new_options(&self, config: &Config) -> CargoResult { - let vcs = self._value_of("vcs").map(|vcs| match vcs { - "git" => VersionControl::Git, - "hg" => VersionControl::Hg, - "pijul" => VersionControl::Pijul, - "fossil" => VersionControl::Fossil, - "none" => VersionControl::NoVcs, - vcs => panic!("Impossible vcs: {:?}", vcs), - }); - NewOptions::new( - vcs, - self._is_present("bin"), - self._is_present("lib"), - self.value_of_path("path", config).unwrap(), - self._value_of("name").map(|s| s.to_string()), - self._value_of("edition").map(|s| s.to_string()), - self.registry(config)?, - ) - } - - fn registry(&self, config: &Config) -> CargoResult> { - match self._value_of("registry") { - Some(registry) => { - validate_package_name(registry, "registry name", "")?; - - if registry == CRATES_IO_REGISTRY { - // If "crates.io" is specified, then we just need to return `None`, - // as that will cause cargo to use crates.io. This is required - // for the case where a default alternative registry is used - // but the user wants to switch back to crates.io for a single - // command. - Ok(None) - } else { - Ok(Some(registry.to_string())) - } - } - None => config.default_registry(), - } - } - - fn index(&self) -> CargoResult> { - let index = self._value_of("index").map(|s| s.to_string()); - Ok(index) - } - - fn check_optional_opts( - &self, - workspace: &Workspace<'_>, - compile_opts: &CompileOptions, - ) -> CargoResult<()> { - if self.is_present_with_zero_values("package") { - print_available_packages(workspace)? 
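// A simplified sketch of the `--registry` normalization above: naming the
// default registry ("crates-io") explicitly maps to `None`, so later code
// treats it like "no registry requested"; other names are validated and
// passed through. The validation below is a stand-in for
// `validate_package_name`, and the `None` branch elides the lookup of a
// configured default registry.
const CRATES_IO_REGISTRY: &str = "crates-io";

fn registry(arg: Option<&str>) -> Result<Option<String>, String> {
    match arg {
        Some(name) => {
            let valid = !name.is_empty()
                && name.chars().all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_');
            if !valid {
                return Err(format!("invalid registry name `{name}`"));
            }
            if name == CRATES_IO_REGISTRY {
                Ok(None)
            } else {
                Ok(Some(name.to_string()))
            }
        }
        None => Ok(None),
    }
}

fn main() {
    assert_eq!(registry(Some("crates-io")), Ok(None));
    assert_eq!(registry(Some("my-mirror")), Ok(Some("my-mirror".to_string())));
    assert!(registry(Some("bad name")).is_err());
}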
- } - - if self.is_present_with_zero_values("example") { - print_available_examples(workspace, compile_opts)?; - } - - if self.is_present_with_zero_values("bin") { - print_available_binaries(workspace, compile_opts)?; - } - - if self.is_present_with_zero_values("bench") { - print_available_benches(workspace, compile_opts)?; - } - - if self.is_present_with_zero_values("test") { - print_available_tests(workspace, compile_opts)?; - } - - Ok(()) - } - - fn is_present_with_zero_values(&self, name: &str) -> bool { - self._is_present(name) && self._value_of(name).is_none() - } - - fn _value_of(&self, name: &str) -> Option<&str>; - - fn _values_of(&self, name: &str) -> Vec; - - fn _value_of_os(&self, name: &str) -> Option<&OsStr>; - - fn _values_of_os(&self, name: &str) -> Vec; - - fn _is_present(&self, name: &str) -> bool; -} - -impl<'a> ArgMatchesExt for ArgMatches<'a> { - fn _value_of(&self, name: &str) -> Option<&str> { - self.value_of(name) - } - - fn _value_of_os(&self, name: &str) -> Option<&OsStr> { - self.value_of_os(name) - } - - fn _values_of(&self, name: &str) -> Vec { - self.values_of(name) - .unwrap_or_default() - .map(|s| s.to_string()) - .collect() - } - - fn _values_of_os(&self, name: &str) -> Vec { - self.values_of_os(name) - .unwrap_or_default() - .map(|s| s.to_os_string()) - .collect() - } - - fn _is_present(&self, name: &str) -> bool { - self.is_present(name) - } -} - -pub fn values(args: &ArgMatches<'_>, name: &str) -> Vec { - args._values_of(name) -} - -pub fn values_os(args: &ArgMatches<'_>, name: &str) -> Vec { - args._values_of_os(name) -} - -#[derive(PartialEq, Eq, PartialOrd, Ord)] -pub enum CommandInfo { - BuiltIn { about: Option }, - External { path: PathBuf }, - Alias { target: StringOrVec }, -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/de.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/de.rs deleted file mode 100644 index 26b149c79..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/de.rs +++ /dev/null @@ -1,532 +0,0 @@ -//! Support for deserializing configuration via `serde` - -use crate::util::config::value; -use crate::util::config::{Config, ConfigError, ConfigKey}; -use crate::util::config::{ConfigValue as CV, Definition, Value}; -use serde::{de, de::IntoDeserializer}; -use std::collections::HashSet; -use std::vec; - -/// Serde deserializer used to convert config values to a target type using -/// `Config::get`. -#[derive(Clone)] -pub(super) struct Deserializer<'config> { - pub(super) config: &'config Config, - /// The current key being deserialized. - pub(super) key: ConfigKey, - /// Whether or not this key part is allowed to be an inner table. For - /// example, `profile.dev.build-override` needs to check if - /// CARGO_PROFILE_DEV_BUILD_OVERRIDE_ prefixes exist. But - /// CARGO_BUILD_TARGET should not check for prefixes because it would - /// collide with CARGO_BUILD_TARGET_DIR. See `ConfigMapAccess` for - /// details. - pub(super) env_prefix_ok: bool, -} - -macro_rules! deserialize_method { - ($method:ident, $visit:ident, $getter:ident) => { - fn $method(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - let v = self - .config - .$getter(&self.key)? 
- .ok_or_else(|| ConfigError::missing(&self.key))?; - let Value { val, definition } = v; - let res: Result = visitor.$visit(val); - res.map_err(|e| e.with_key_context(&self.key, definition)) - } - }; -} - -impl<'de, 'config> de::Deserializer<'de> for Deserializer<'config> { - type Error = ConfigError; - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - let cv = self.config.get_cv_with_env(&self.key)?; - if let Some(cv) = cv { - let res: (Result, Definition) = match cv { - CV::Integer(i, def) => (visitor.visit_i64(i), def), - CV::String(s, def) => (visitor.visit_string(s), def), - CV::List(_, def) => (visitor.visit_seq(ConfigSeqAccess::new(self.clone())?), def), - CV::Table(_, def) => ( - visitor.visit_map(ConfigMapAccess::new_map(self.clone())?), - def, - ), - CV::Boolean(b, def) => (visitor.visit_bool(b), def), - }; - let (res, def) = res; - return res.map_err(|e| e.with_key_context(&self.key, def)); - } - Err(ConfigError::missing(&self.key)) - } - - deserialize_method!(deserialize_bool, visit_bool, get_bool); - deserialize_method!(deserialize_i8, visit_i64, get_integer); - deserialize_method!(deserialize_i16, visit_i64, get_integer); - deserialize_method!(deserialize_i32, visit_i64, get_integer); - deserialize_method!(deserialize_i64, visit_i64, get_integer); - deserialize_method!(deserialize_u8, visit_i64, get_integer); - deserialize_method!(deserialize_u16, visit_i64, get_integer); - deserialize_method!(deserialize_u32, visit_i64, get_integer); - deserialize_method!(deserialize_u64, visit_i64, get_integer); - deserialize_method!(deserialize_string, visit_string, get_string_priv); - - fn deserialize_option(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - if self.config.has_key(&self.key, self.env_prefix_ok) { - visitor.visit_some(self) - } else { - // Treat missing values as `None`. - visitor.visit_none() - } - } - - fn deserialize_struct( - self, - name: &'static str, - fields: &'static [&'static str], - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - // Match on the magical struct name/field names that are passed in to - // detect when we're deserializing `Value`. - // - // See more comments in `value.rs` for the protocol used here. - if name == value::NAME && fields == value::FIELDS { - return visitor.visit_map(ValueDeserializer::new(self)?); - } - visitor.visit_map(ConfigMapAccess::new_struct(self, fields)?) - } - - fn deserialize_map(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_map(ConfigMapAccess::new_map(self)?) - } - - fn deserialize_seq(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_seq(ConfigSeqAccess::new(self)?) - } - - fn deserialize_tuple(self, _len: usize, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_seq(ConfigSeqAccess::new(self)?) - } - - fn deserialize_tuple_struct( - self, - _name: &'static str, - _len: usize, - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - visitor.visit_seq(ConfigSeqAccess::new(self)?) 
- } - - fn deserialize_newtype_struct( - self, - name: &'static str, - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - let merge = if name == "StringList" { - true - } else if name == "UnmergedStringList" { - false - } else { - return visitor.visit_newtype_struct(self); - }; - - let vals = self.config.get_list_or_string(&self.key, merge)?; - let vals: Vec = vals.into_iter().map(|vd| vd.0).collect(); - visitor.visit_newtype_struct(vals.into_deserializer()) - } - - fn deserialize_enum( - self, - _name: &'static str, - _variants: &'static [&'static str], - visitor: V, - ) -> Result - where - V: de::Visitor<'de>, - { - let value = self - .config - .get_string_priv(&self.key)? - .ok_or_else(|| ConfigError::missing(&self.key))?; - - let Value { val, definition } = value; - visitor - .visit_enum(val.into_deserializer()) - .map_err(|e: ConfigError| e.with_key_context(&self.key, definition)) - } - - // These aren't really supported, yet. - serde::forward_to_deserialize_any! { - f32 f64 char str bytes - byte_buf unit unit_struct - identifier ignored_any - } -} - -struct ConfigMapAccess<'config> { - de: Deserializer<'config>, - /// The fields that this map should deserialize. - fields: Vec, - /// Current field being deserialized. - field_index: usize, -} - -#[derive(Debug, PartialEq, Eq, Hash)] -enum KeyKind { - Normal(String), - CaseSensitive(String), -} - -impl<'config> ConfigMapAccess<'config> { - fn new_map(de: Deserializer<'config>) -> Result, ConfigError> { - let mut fields = Vec::new(); - if let Some(mut v) = de.config.get_table(&de.key)? { - // `v: Value>` - for (key, _value) in v.val.drain() { - fields.push(KeyKind::CaseSensitive(key)); - } - } - if de.config.cli_unstable().advanced_env { - // `CARGO_PROFILE_DEV_PACKAGE_` - let env_prefix = format!("{}_", de.key.as_env_key()); - for env_key in de.config.env.keys() { - if env_key.starts_with(&env_prefix) { - // `CARGO_PROFILE_DEV_PACKAGE_bar_OPT_LEVEL = 3` - let rest = &env_key[env_prefix.len()..]; - // `rest = bar_OPT_LEVEL` - let part = rest.splitn(2, '_').next().unwrap(); - // `part = "bar"` - fields.push(KeyKind::CaseSensitive(part.to_string())); - } - } - } - Ok(ConfigMapAccess { - de, - fields, - field_index: 0, - }) - } - - fn new_struct( - de: Deserializer<'config>, - given_fields: &'static [&'static str], - ) -> Result, ConfigError> { - let table = de.config.get_table(&de.key)?; - - // Assume that if we're deserializing a struct it exhaustively lists all - // possible fields on this key that we're *supposed* to use, so take - // this opportunity to warn about any keys that aren't recognized as - // fields and warn about them. - if let Some(v) = table.as_ref() { - let unused_keys = v - .val - .iter() - .filter(|(k, _v)| !given_fields.iter().any(|gk| gk == k)); - for (unused_key, unused_value) in unused_keys { - de.config.shell().warn(format!( - "unused config key `{}.{}` in `{}`", - de.key, - unused_key, - unused_value.definition() - ))?; - } - } - - let mut fields = HashSet::new(); - - // If the caller is interested in a field which we can provide from - // the environment, get it from there. 
- for field in given_fields { - let mut field_key = de.key.clone(); - field_key.push(field); - for env_key in de.config.env.keys() { - if env_key.starts_with(field_key.as_env_key()) { - fields.insert(KeyKind::Normal(field.to_string())); - } - } - } - - // Add everything from the config table we're interested in that we - // haven't already provided via an environment variable - if let Some(v) = table { - for key in v.val.keys() { - fields.insert(KeyKind::Normal(key.clone())); - } - } - - Ok(ConfigMapAccess { - de, - fields: fields.into_iter().collect(), - field_index: 0, - }) - } -} - -impl<'de, 'config> de::MapAccess<'de> for ConfigMapAccess<'config> { - type Error = ConfigError; - - fn next_key_seed(&mut self, seed: K) -> Result, Self::Error> - where - K: de::DeserializeSeed<'de>, - { - if self.field_index >= self.fields.len() { - return Ok(None); - } - let field = match &self.fields[self.field_index] { - KeyKind::Normal(s) | KeyKind::CaseSensitive(s) => s.as_str(), - }; - seed.deserialize(field.into_deserializer()).map(Some) - } - - fn next_value_seed(&mut self, seed: V) -> Result - where - V: de::DeserializeSeed<'de>, - { - let field = &self.fields[self.field_index]; - self.field_index += 1; - // Set this as the current key in the deserializer. - let field = match field { - KeyKind::Normal(field) => { - self.de.key.push(field); - field - } - KeyKind::CaseSensitive(field) => { - self.de.key.push_sensitive(field); - field - } - }; - // Env vars that are a prefix of another with a dash/underscore cannot - // be supported by our serde implementation, so check for them here. - // Example: - // CARGO_BUILD_TARGET - // CARGO_BUILD_TARGET_DIR - // or - // CARGO_PROFILE_DEV_DEBUG - // CARGO_PROFILE_DEV_DEBUG_ASSERTIONS - // The `deserialize_option` method does not know the type of the field. - // If the type is an Option (like - // `profile.dev.build-override`), then it needs to check for env vars - // starting with CARGO_FOO_BAR_. This is a problem for keys like - // CARGO_BUILD_TARGET because checking for a prefix would incorrectly - // match CARGO_BUILD_TARGET_DIR. `deserialize_option` would have no - // choice but to call `visit_some()` which would then fail if - // CARGO_BUILD_TARGET isn't set. So we check for these prefixes and - // disallow them here. - let env_prefix = format!("{}_", field).replace('-', "_"); - let env_prefix_ok = !self.fields.iter().any(|field| { - let field = match field { - KeyKind::Normal(s) | KeyKind::CaseSensitive(s) => s.as_str(), - }; - field.replace('-', "_").starts_with(&env_prefix) - }); - - let result = seed.deserialize(Deserializer { - config: self.de.config, - key: self.de.key.clone(), - env_prefix_ok, - }); - self.de.key.pop(); - result - } -} - -struct ConfigSeqAccess { - list_iter: vec::IntoIter<(String, Definition)>, -} - -impl ConfigSeqAccess { - fn new(de: Deserializer<'_>) -> Result { - let mut res = Vec::new(); - if let Some(v) = de.config._get_list(&de.key)? { - res.extend(v.val); - } - - de.config.get_env_list(&de.key, &mut res)?; - - Ok(ConfigSeqAccess { - list_iter: res.into_iter(), - }) - } -} - -impl<'de> de::SeqAccess<'de> for ConfigSeqAccess { - type Error = ConfigError; - - fn next_element_seed(&mut self, seed: T) -> Result, Self::Error> - where - T: de::DeserializeSeed<'de>, - { - match self.list_iter.next() { - // TODO: add `def` to error? - Some((value, _def)) => seed.deserialize(value.into_deserializer()).map(Some), - None => Ok(None), - } - } -} - -/// This is a deserializer that deserializes into a `Value` for -/// configuration. 
-/// -/// This is a special deserializer because it deserializes one of its struct -/// fields into the location that this configuration value was defined in. -/// -/// See more comments in `value.rs` for the protocol used here. -struct ValueDeserializer<'config> { - hits: u32, - definition: Definition, - de: Deserializer<'config>, -} - -impl<'config> ValueDeserializer<'config> { - fn new(de: Deserializer<'config>) -> Result, ConfigError> { - // Figure out where this key is defined. - let definition = { - let env = de.key.as_env_key(); - let env_def = Definition::Environment(env.to_string()); - match (de.config.env.contains_key(env), de.config.get_cv(&de.key)?) { - (true, Some(cv)) => { - // Both, pick highest priority. - if env_def.is_higher_priority(cv.definition()) { - env_def - } else { - cv.definition().clone() - } - } - (false, Some(cv)) => cv.definition().clone(), - // Assume it is an environment, even if the key is not set. - // This can happen for intermediate tables, like - // CARGO_FOO_BAR_* where `CARGO_FOO_BAR` is not set. - (_, None) => env_def, - } - }; - Ok(ValueDeserializer { - hits: 0, - definition, - de, - }) - } -} - -impl<'de, 'config> de::MapAccess<'de> for ValueDeserializer<'config> { - type Error = ConfigError; - - fn next_key_seed(&mut self, seed: K) -> Result, Self::Error> - where - K: de::DeserializeSeed<'de>, - { - self.hits += 1; - match self.hits { - 1 => seed - .deserialize(value::VALUE_FIELD.into_deserializer()) - .map(Some), - 2 => seed - .deserialize(value::DEFINITION_FIELD.into_deserializer()) - .map(Some), - _ => Ok(None), - } - } - - fn next_value_seed(&mut self, seed: V) -> Result - where - V: de::DeserializeSeed<'de>, - { - // If this is the first time around we deserialize the `value` field - // which is the actual deserializer - if self.hits == 1 { - return seed - .deserialize(self.de.clone()) - .map_err(|e| e.with_key_context(&self.de.key, self.definition.clone())); - } - - // ... otherwise we're deserializing the `definition` field, so we need - // to figure out where the field we just deserialized was defined at. - match &self.definition { - Definition::Path(path) => { - seed.deserialize(Tuple2Deserializer(0i32, path.to_string_lossy())) - } - Definition::Environment(env) => { - seed.deserialize(Tuple2Deserializer(1i32, env.as_str())) - } - Definition::Cli => seed.deserialize(Tuple2Deserializer(2i32, "")), - } - } -} - -/// A deserializer which takes two values and deserializes into a tuple of those -/// two values. This is similar to types like `StrDeserializer` in upstream -/// serde itself. 
-struct Tuple2Deserializer(T, U); - -impl<'de, T, U> de::Deserializer<'de> for Tuple2Deserializer -where - T: IntoDeserializer<'de, ConfigError>, - U: IntoDeserializer<'de, ConfigError>, -{ - type Error = ConfigError; - - fn deserialize_any(self, visitor: V) -> Result - where - V: de::Visitor<'de>, - { - struct SeqVisitor { - first: Option, - second: Option, - } - impl<'de, T, U> de::SeqAccess<'de> for SeqVisitor - where - T: IntoDeserializer<'de, ConfigError>, - U: IntoDeserializer<'de, ConfigError>, - { - type Error = ConfigError; - fn next_element_seed(&mut self, seed: K) -> Result, Self::Error> - where - K: de::DeserializeSeed<'de>, - { - if let Some(first) = self.first.take() { - return seed.deserialize(first.into_deserializer()).map(Some); - } - if let Some(second) = self.second.take() { - return seed.deserialize(second.into_deserializer()).map(Some); - } - Ok(None) - } - } - - visitor.visit_seq(SeqVisitor { - first: Some(self.0), - second: Some(self.1), - }) - } - - serde::forward_to_deserialize_any! { - bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq - bytes byte_buf map struct option unit newtype_struct - ignored_any unit_struct tuple_struct tuple enum identifier - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/key.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/key.rs deleted file mode 100644 index 4ac119174..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/key.rs +++ /dev/null @@ -1,116 +0,0 @@ -use std::borrow::Cow; -use std::fmt; - -/// Key for a configuration variable. -/// -/// This type represents a configuration variable that we're looking up in -/// Cargo's configuration. This structure simultaneously keeps track of a -/// corresponding environment variable name as well as a TOML config name. The -/// intention here is that this is built up and torn down over time efficiently, -/// avoiding clones and such as possible. -#[derive(Debug, Clone)] -pub struct ConfigKey { - // The current environment variable this configuration key maps to. This is - // updated with `push` methods and looks like `CARGO_FOO_BAR` for pushing - // `foo` and then `bar`. - env: String, - // This is used to keep track of how many sub-keys have been pushed on - // this `ConfigKey`. Each element of this vector is a new sub-key pushed - // onto this `ConfigKey`. Each element is a pair where the first item is - // the key part as a string, and the second item is an index into `env`. - // The `env` index is used on `pop` to truncate `env` to rewind back to - // the previous `ConfigKey` state before a `push`. - parts: Vec<(String, usize)>, -} - -impl ConfigKey { - /// Creates a new blank configuration key which is ready to get built up by - /// using `push` and `push_sensitive`. - pub fn new() -> ConfigKey { - ConfigKey { - env: "CARGO".to_string(), - parts: Vec::new(), - } - } - - /// Creates a `ConfigKey` from the `key` specified. - /// - /// The `key` specified is expected to be a period-separated toml - /// configuration key. - pub fn from_str(key: &str) -> ConfigKey { - let mut cfg = ConfigKey::new(); - for part in key.split('.') { - cfg.push(part); - } - cfg - } - - /// Pushes a new sub-key on this `ConfigKey`. This sub-key should be - /// equivalent to accessing a sub-table in TOML. 
- /// - /// Note that this considers `name` to be case-insensitive, meaning that the - /// corrseponding toml key is appended with this `name` as-is and the - /// corresponding env key is appended with `name` after transforming it to - /// uppercase characters. - pub fn push(&mut self, name: &str) { - let env = name.replace("-", "_").to_uppercase(); - self._push(&env, name); - } - - /// Performs the same function as `push` except that the corresponding - /// environment variable does not get the uppercase letters of `name` but - /// instead `name` is pushed raw onto the corresponding environment - /// variable. - pub fn push_sensitive(&mut self, name: &str) { - self._push(name, name); - } - - fn _push(&mut self, env: &str, config: &str) { - self.parts.push((config.to_string(), self.env.len())); - self.env.push('_'); - self.env.push_str(env); - } - - /// Rewinds this `ConfigKey` back to the state it was at before the last - /// `push` method being called. - pub fn pop(&mut self) { - let (_part, env) = self.parts.pop().unwrap(); - self.env.truncate(env); - } - - /// Returns the corresponding environment variable key for this - /// configuration value. - pub fn as_env_key(&self) -> &str { - &self.env - } - - /// Returns an iterator of the key parts as strings. - pub(crate) fn parts(&self) -> impl Iterator { - self.parts.iter().map(|p| p.0.as_ref()) - } - - /// Returns whether or not this is a key for the root table. - pub fn is_root(&self) -> bool { - self.parts.is_empty() - } -} - -impl fmt::Display for ConfigKey { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let parts: Vec<_> = self.parts().map(|part| escape_key_part(part)).collect(); - parts.join(".").fmt(f) - } -} - -fn escape_key_part<'a>(part: &'a str) -> Cow<'a, str> { - let ok = part.chars().all(|c| { - matches!(c, - 'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_') - }); - if ok { - Cow::Borrowed(part) - } else { - // This is a bit messy, but toml doesn't expose a function to do this. - Cow::Owned(toml::to_string(&toml::Value::String(part.to_string())).unwrap()) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/mod.rs deleted file mode 100644 index 52a4955fd..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/mod.rs +++ /dev/null @@ -1,2330 +0,0 @@ -//! Cargo's config system. -//! -//! The `Config` object contains general information about the environment, -//! and provides access to Cargo's configuration files. -//! -//! ## Config value API -//! -//! The primary API for fetching user-defined config values is the -//! `Config::get` method. It uses `serde` to translate config values to a -//! target type. -//! -//! There are a variety of helper types for deserializing some common formats: -//! -//! - `value::Value`: This type provides access to the location where the -//! config value was defined. -//! - `ConfigRelativePath`: For a path that is relative to where it is -//! defined. -//! - `PathAndArgs`: Similar to `ConfigRelativePath`, but also supports a list -//! of arguments, useful for programs to execute. -//! - `StringList`: Get a value that is either a list or a whitespace split -//! string. -//! -//! ## Map key recommendations -//! -//! Handling tables that have arbitrary keys can be tricky, particularly if it -//! should support environment variables. In general, if possible, the caller -//! should pass the full key path into the `get()` method so that the config -//! 
deserializer can properly handle environment variables (which need to be -//! uppercased, and dashes converted to underscores). -//! -//! A good example is the `[target]` table. The code will request -//! `target.$TRIPLE` and the config system can then appropriately fetch -//! environment variables like `CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER`. -//! Conversely, it is not possible do the same thing for the `cfg()` target -//! tables (because Cargo must fetch all of them), so those do not support -//! environment variables. -//! -//! Try to avoid keys that are a prefix of another with a dash/underscore. For -//! example `build.target` and `build.target-dir`. This is OK if these are not -//! structs/maps, but if it is a struct or map, then it will not be able to -//! read the environment variable due to ambiguity. (See `ConfigMapAccess` for -//! more details.) -//! -//! ## Internal API -//! -//! Internally config values are stored with the `ConfigValue` type after they -//! have been loaded from disk. This is similar to the `toml::Value` type, but -//! includes the definition location. The `get()` method uses serde to -//! translate from `ConfigValue` and environment variables to the caller's -//! desired type. - -use std::borrow::Cow; -use std::cell::{RefCell, RefMut}; -use std::collections::hash_map::Entry::{Occupied, Vacant}; -use std::collections::{HashMap, HashSet}; -use std::env; -use std::ffi::OsStr; -use std::fmt; -use std::fs::{self, File}; -use std::io::prelude::*; -use std::io::{self, SeekFrom}; -use std::mem; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::sync::Once; -use std::time::Instant; - -use self::ConfigValue as CV; -use crate::core::compiler::rustdoc::RustdocExternMap; -use crate::core::shell::Verbosity; -use crate::core::{features, CliUnstable, Shell, SourceId, Workspace}; -use crate::ops; -use crate::util::errors::CargoResult; -use crate::util::toml as cargo_toml; -use crate::util::validate_package_name; -use crate::util::{FileLock, Filesystem, IntoUrl, IntoUrlWithBase, Rustc}; -use anyhow::{anyhow, bail, format_err, Context as _}; -use cargo_util::paths; -use curl::easy::Easy; -use lazycell::LazyCell; -use serde::Deserialize; -use url::Url; - -mod de; -use de::Deserializer; - -mod value; -pub use value::{Definition, OptValue, Value}; - -mod key; -pub use key::ConfigKey; - -mod path; -pub use path::{ConfigRelativePath, PathAndArgs}; - -mod target; -pub use target::{TargetCfgConfig, TargetConfig}; - -// Helper macro for creating typed access methods. -macro_rules! get_value_typed { - ($name:ident, $ty:ty, $variant:ident, $expected:expr) => { - /// Low-level private method for getting a config value as an OptValue. - fn $name(&self, key: &ConfigKey) -> Result, ConfigError> { - let cv = self.get_cv(key)?; - let env = self.get_env::<$ty>(key)?; - match (cv, env) { - (Some(CV::$variant(val, definition)), Some(env)) => { - if definition.is_higher_priority(&env.definition) { - Ok(Some(Value { val, definition })) - } else { - Ok(Some(env)) - } - } - (Some(CV::$variant(val, definition)), None) => Ok(Some(Value { val, definition })), - (Some(cv), _) => Err(ConfigError::expected(key, $expected, &cv)), - (None, Some(env)) => Ok(Some(env)), - (None, None) => Ok(None), - } - } - }; -} - -/// Configuration information for cargo. This is not specific to a build, it is information -/// relating to cargo itself. -#[derive(Debug)] -pub struct Config { - /// The location of the user's Cargo home directory. OS-dependent. 
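// A simplified sketch of how a dotted config key maps to the corresponding environment
// variable name, in the spirit of `ConfigKey::push` and the module docs above (parts are
// uppercased, dashes become underscores, and everything is joined under a `CARGO` prefix).
// This is not the real `ConfigKey`; it only illustrates the mapping.
fn env_key_for(dotted_key: &str) -> String {
    let mut env = String::from("CARGO");
    for part in dotted_key.split('.') {
        env.push('_');
        env.push_str(&part.replace('-', "_").to_uppercase());
    }
    env
}

fn main() {
    assert_eq!(env_key_for("build.jobs"), "CARGO_BUILD_JOBS");
    assert_eq!(
        env_key_for("target.x86_64-unknown-linux-gnu.linker"),
        "CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER"
    );
    println!("env key mapping works as described in the module docs");
}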
- home_path: Filesystem, - /// Information about how to write messages to the shell - shell: RefCell, - /// A collection of configuration options - values: LazyCell>, - /// CLI config values, passed in via `configure`. - cli_config: Option>, - /// The current working directory of cargo - cwd: PathBuf, - /// Directory where config file searching should stop (inclusive). - search_stop_path: Option, - /// The location of the cargo executable (path to current process) - cargo_exe: LazyCell, - /// The location of the rustdoc executable - rustdoc: LazyCell, - /// Whether we are printing extra verbose messages - extra_verbose: bool, - /// `frozen` is the same as `locked`, but additionally will not access the - /// network to determine if the lock file is out-of-date. - frozen: bool, - /// `locked` is set if we should not update lock files. If the lock file - /// is missing, or needs to be updated, an error is produced. - locked: bool, - /// `offline` is set if we should never access the network, but otherwise - /// continue operating if possible. - offline: bool, - /// A global static IPC control mechanism (used for managing parallel builds) - jobserver: Option, - /// Cli flags of the form "-Z something" merged with config file values - unstable_flags: CliUnstable, - /// Cli flags of the form "-Z something" - unstable_flags_cli: Option>, - /// A handle on curl easy mode for http calls - easy: LazyCell>, - /// Cache of the `SourceId` for crates.io - crates_io_source_id: LazyCell, - /// If false, don't cache `rustc --version --verbose` invocations - cache_rustc_info: bool, - /// Creation time of this config, used to output the total build time - creation_time: Instant, - /// Target Directory via resolved Cli parameter - target_dir: Option, - /// Environment variables, separated to assist testing. - env: HashMap, - /// Environment variables, converted to uppercase to check for case mismatch - upper_case_env: HashMap, - /// Tracks which sources have been updated to avoid multiple updates. - updated_sources: LazyCell>>, - /// Lock, if held, of the global package cache along with the number of - /// acquisitions so far. - package_cache_lock: RefCell, usize)>>, - /// Cached configuration parsed by Cargo - http_config: LazyCell, - future_incompat_config: LazyCell, - net_config: LazyCell, - build_config: LazyCell, - target_cfgs: LazyCell>, - doc_extern_map: LazyCell, - progress_config: ProgressConfig, - env_config: LazyCell, - /// This should be false if: - /// - this is an artifact of the rustc distribution process for "stable" or for "beta" - /// - this is an `#[test]` that does not opt in with `enable_nightly_features` - /// - this is an integration test that uses `ProcessBuilder` - /// that does not opt in with `masquerade_as_nightly_cargo` - /// This should be true if: - /// - this is an artifact of the rustc distribution process for "nightly" - /// - this is being used in the rustc distribution process internally - /// - this is a cargo executable that was built from source - /// - this is an `#[test]` that called `enable_nightly_features` - /// - this is an integration test that uses `ProcessBuilder` - /// that called `masquerade_as_nightly_cargo` - /// It's public to allow tests use nightly features. - /// NOTE: this should be set before `configure()`. If calling this from an integration test, - /// consider using `ConfigBuilder::enable_nightly_features` instead. - pub nightly_features_allowed: bool, -} - -impl Config { - /// Creates a new config instance. 
- /// - /// This is typically used for tests or other special cases. `default` is - /// preferred otherwise. - /// - /// This does only minimal initialization. In particular, it does not load - /// any config files from disk. Those will be loaded lazily as-needed. - pub fn new(shell: Shell, cwd: PathBuf, homedir: PathBuf) -> Config { - static mut GLOBAL_JOBSERVER: *mut jobserver::Client = 0 as *mut _; - static INIT: Once = Once::new(); - - // This should be called early on in the process, so in theory the - // unsafety is ok here. (taken ownership of random fds) - INIT.call_once(|| unsafe { - if let Some(client) = jobserver::Client::from_env() { - GLOBAL_JOBSERVER = Box::into_raw(Box::new(client)); - } - }); - - let env: HashMap<_, _> = env::vars_os() - .filter_map(|(k, v)| { - // Ignore any key/values that are not valid Unicode. - match (k.into_string(), v.into_string()) { - (Ok(k), Ok(v)) => Some((k, v)), - _ => None, - } - }) - .collect(); - - let upper_case_env = env - .clone() - .into_iter() - .map(|(k, _)| (k.to_uppercase().replace("-", "_"), k)) - .collect(); - - let cache_rustc_info = match env.get("CARGO_CACHE_RUSTC_INFO") { - Some(cache) => cache != "0", - _ => true, - }; - - Config { - home_path: Filesystem::new(homedir), - shell: RefCell::new(shell), - cwd, - search_stop_path: None, - values: LazyCell::new(), - cli_config: None, - cargo_exe: LazyCell::new(), - rustdoc: LazyCell::new(), - extra_verbose: false, - frozen: false, - locked: false, - offline: false, - jobserver: unsafe { - if GLOBAL_JOBSERVER.is_null() { - None - } else { - Some((*GLOBAL_JOBSERVER).clone()) - } - }, - unstable_flags: CliUnstable::default(), - unstable_flags_cli: None, - easy: LazyCell::new(), - crates_io_source_id: LazyCell::new(), - cache_rustc_info, - creation_time: Instant::now(), - target_dir: None, - env, - upper_case_env, - updated_sources: LazyCell::new(), - package_cache_lock: RefCell::new(None), - http_config: LazyCell::new(), - future_incompat_config: LazyCell::new(), - net_config: LazyCell::new(), - build_config: LazyCell::new(), - target_cfgs: LazyCell::new(), - doc_extern_map: LazyCell::new(), - progress_config: ProgressConfig::default(), - env_config: LazyCell::new(), - nightly_features_allowed: matches!(&*features::channel(), "nightly" | "dev"), - } - } - - /// Creates a new Config instance, with all default settings. - /// - /// This does only minimal initialization. In particular, it does not load - /// any config files from disk. Those will be loaded lazily as-needed. - pub fn default() -> CargoResult { - let shell = Shell::new(); - let cwd = env::current_dir() - .with_context(|| "couldn't get the current directory of the process")?; - let homedir = homedir(&cwd).ok_or_else(|| { - anyhow!( - "Cargo couldn't find your home directory. \ - This probably means that $HOME was not set." - ) - })?; - Ok(Config::new(shell, cwd, homedir)) - } - - /// Gets the user's Cargo home directory (OS-dependent). - pub fn home(&self) -> &Filesystem { - &self.home_path - } - - /// Gets the Cargo Git directory (`/git`). - pub fn git_path(&self) -> Filesystem { - self.home_path.join("git") - } - - /// Gets the Cargo registry index directory (`/registry/index`). - pub fn registry_index_path(&self) -> Filesystem { - self.home_path.join("registry").join("index") - } - - /// Gets the Cargo registry cache directory (`/registry/path`). - pub fn registry_cache_path(&self) -> Filesystem { - self.home_path.join("registry").join("cache") - } - - /// Gets the Cargo registry source directory (`/registry/src`). 
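use std::collections::HashMap;
use std::env;

// A small sketch of the environment snapshot built in `Config::new` above: keep only
// keys/values that are valid Unicode, and remember an uppercased form of each key so a
// later lookup can warn about variables that differ only in case (e.g. `cargo_build_jobs`).
// This is a standalone illustration, not the actual `Config` constructor.
fn snapshot_env() -> (HashMap<String, String>, HashMap<String, String>) {
    let env: HashMap<String, String> = env::vars_os()
        .filter_map(|(k, v)| match (k.into_string(), v.into_string()) {
            (Ok(k), Ok(v)) => Some((k, v)),
            _ => None, // silently drop non-Unicode entries, as the real code does
        })
        .collect();

    let upper_case_env = env
        .keys()
        .map(|k| (k.to_uppercase().replace('-', "_"), k.clone()))
        .collect();

    (env, upper_case_env)
}

fn main() {
    let (env, upper) = snapshot_env();
    println!("{} env vars captured, {} uppercase aliases", env.len(), upper.len());
}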
- pub fn registry_source_path(&self) -> Filesystem { - self.home_path.join("registry").join("src") - } - - /// Gets the default Cargo registry. - pub fn default_registry(&self) -> CargoResult> { - Ok(self - .get_string("registry.default")? - .map(|registry| registry.val)) - } - - /// Gets a reference to the shell, e.g., for writing error messages. - pub fn shell(&self) -> RefMut<'_, Shell> { - self.shell.borrow_mut() - } - - /// Gets the path to the `rustdoc` executable. - pub fn rustdoc(&self) -> CargoResult<&Path> { - self.rustdoc - .try_borrow_with(|| Ok(self.get_tool("rustdoc", &self.build_config()?.rustdoc))) - .map(AsRef::as_ref) - } - - /// Gets the path to the `rustc` executable. - pub fn load_global_rustc(&self, ws: Option<&Workspace<'_>>) -> CargoResult { - let cache_location = ws.map(|ws| { - ws.target_dir() - .join(".rustc_info.json") - .into_path_unlocked() - }); - let wrapper = self.maybe_get_tool("rustc_wrapper", &self.build_config()?.rustc_wrapper); - let rustc_workspace_wrapper = self.maybe_get_tool( - "rustc_workspace_wrapper", - &self.build_config()?.rustc_workspace_wrapper, - ); - - Rustc::new( - self.get_tool("rustc", &self.build_config()?.rustc), - wrapper, - rustc_workspace_wrapper, - &self - .home() - .join("bin") - .join("rustc") - .into_path_unlocked() - .with_extension(env::consts::EXE_EXTENSION), - if self.cache_rustc_info { - cache_location - } else { - None - }, - ) - } - - /// Gets the path to the `cargo` executable. - pub fn cargo_exe(&self) -> CargoResult<&Path> { - self.cargo_exe - .try_borrow_with(|| { - fn from_current_exe() -> CargoResult { - // Try fetching the path to `cargo` using `env::current_exe()`. - // The method varies per operating system and might fail; in particular, - // it depends on `/proc` being mounted on Linux, and some environments - // (like containers or chroots) may not have that available. - let exe = env::current_exe()?.canonicalize()?; - Ok(exe) - } - - fn from_argv() -> CargoResult { - // Grab `argv[0]` and attempt to resolve it to an absolute path. - // If `argv[0]` has one component, it must have come from a `PATH` lookup, - // so probe `PATH` in that case. - // Otherwise, it has multiple components and is either: - // - a relative path (e.g., `./cargo`, `target/debug/cargo`), or - // - an absolute path (e.g., `/usr/local/bin/cargo`). - // In either case, `Path::canonicalize` will return the full absolute path - // to the target if it exists. - let argv0 = env::args_os() - .map(PathBuf::from) - .next() - .ok_or_else(|| anyhow!("no argv[0]"))?; - paths::resolve_executable(&argv0) - } - - let exe = from_current_exe() - .or_else(|_| from_argv()) - .with_context(|| "couldn't get the path to cargo executable")?; - Ok(exe) - }) - .map(AsRef::as_ref) - } - - /// Which package sources have been updated, used to ensure it is only done once. - pub fn updated_sources(&self) -> RefMut<'_, HashSet> { - self.updated_sources - .borrow_with(|| RefCell::new(HashSet::new())) - .borrow_mut() - } - - /// Gets all config values from disk. - /// - /// This will lazy-load the values as necessary. Callers are responsible - /// for checking environment variables. Callers outside of the `config` - /// module should avoid using this. - pub fn values(&self) -> CargoResult<&HashMap> { - self.values.try_borrow_with(|| self.load_values()) - } - - /// Gets a mutable copy of the on-disk config values. - /// - /// This requires the config values to already have been loaded. 
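use std::env;
use std::path::PathBuf;

// A condensed sketch of the fallback chain in `cargo_exe` above: prefer
// `env::current_exe()`, and if that fails (e.g. `/proc` not mounted in a container),
// fall back to resolving `argv[0]`. The PATH probing done by the real
// `paths::resolve_executable` is simplified here to a plain scan of the PATH entries.
fn current_executable() -> std::io::Result<PathBuf> {
    if let Ok(exe) = env::current_exe().and_then(|p| p.canonicalize()) {
        return Ok(exe);
    }
    let argv0 = env::args_os()
        .map(PathBuf::from)
        .next()
        .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::Other, "no argv[0]"))?;
    if argv0.components().count() == 1 {
        // Bare name: it must have come from a PATH lookup, so probe PATH.
        for dir in env::split_paths(&env::var_os("PATH").unwrap_or_default()) {
            let candidate = dir.join(&argv0);
            if candidate.is_file() {
                return candidate.canonicalize();
            }
        }
        Err(std::io::Error::new(std::io::ErrorKind::NotFound, "not found on PATH"))
    } else {
        // Relative or absolute path: canonicalize resolves it fully.
        argv0.canonicalize()
    }
}

fn main() {
    match current_executable() {
        Ok(path) => println!("running from {}", path.display()),
        Err(e) => eprintln!("could not resolve executable: {}", e),
    }
}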
This - /// currently only exists for `cargo vendor` to remove the `source` - /// entries. This doesn't respect environment variables. You should avoid - /// using this if possible. - pub fn values_mut(&mut self) -> CargoResult<&mut HashMap> { - match self.values.borrow_mut() { - Some(map) => Ok(map), - None => bail!("config values not loaded yet"), - } - } - - // Note: this is used by RLS, not Cargo. - pub fn set_values(&self, values: HashMap) -> CargoResult<()> { - if self.values.borrow().is_some() { - bail!("config values already found") - } - match self.values.fill(values) { - Ok(()) => Ok(()), - Err(_) => bail!("could not fill values"), - } - } - - /// Sets the path where ancestor config file searching will stop. The - /// given path is included, but its ancestors are not. - pub fn set_search_stop_path>(&mut self, path: P) { - let path = path.into(); - debug_assert!(self.cwd.starts_with(&path)); - self.search_stop_path = Some(path); - } - - /// Reloads on-disk configuration values, starting at the given path and - /// walking up its ancestors. - pub fn reload_rooted_at>(&mut self, path: P) -> CargoResult<()> { - let values = self.load_values_from(path.as_ref())?; - self.values.replace(values); - self.merge_cli_args()?; - self.load_unstable_flags_from_config()?; - Ok(()) - } - - /// The current working directory. - pub fn cwd(&self) -> &Path { - &self.cwd - } - - /// The `target` output directory to use. - /// - /// Returns `None` if the user has not chosen an explicit directory. - /// - /// Callers should prefer `Workspace::target_dir` instead. - pub fn target_dir(&self) -> CargoResult> { - if let Some(dir) = &self.target_dir { - Ok(Some(dir.clone())) - } else if let Some(dir) = self.env.get("CARGO_TARGET_DIR") { - // Check if the CARGO_TARGET_DIR environment variable is set to an empty string. - if dir.is_empty() { - bail!( - "the target directory is set to an empty string in the \ - `CARGO_TARGET_DIR` environment variable" - ) - } - - Ok(Some(Filesystem::new(self.cwd.join(dir)))) - } else if let Some(val) = &self.build_config()?.target_dir { - let path = val.resolve_path(self); - - // Check if the target directory is set to an empty string in the config.toml file. - if val.raw_value().is_empty() { - bail!( - "the target directory is set to an empty string in {}", - val.value().definition - ) - } - - Ok(Some(Filesystem::new(path))) - } else { - Ok(None) - } - } - - /// Get a configuration value by key. - /// - /// This does NOT look at environment variables. See `get_cv_with_env` for - /// a variant that supports environment variables. - fn get_cv(&self, key: &ConfigKey) -> CargoResult> { - log::trace!("get cv {:?}", key); - let vals = self.values()?; - if key.is_root() { - // Returning the entire root table (for example `cargo config get` - // with no key). The definition here shouldn't matter. 
- return Ok(Some(CV::Table( - vals.clone(), - Definition::Path(PathBuf::new()), - ))); - } - let mut parts = key.parts().enumerate(); - let mut val = match vals.get(parts.next().unwrap().1) { - Some(val) => val, - None => return Ok(None), - }; - for (i, part) in parts { - match val { - CV::Table(map, _) => { - val = match map.get(part) { - Some(val) => val, - None => return Ok(None), - } - } - CV::Integer(_, def) - | CV::String(_, def) - | CV::List(_, def) - | CV::Boolean(_, def) => { - let mut key_so_far = ConfigKey::new(); - for part in key.parts().take(i) { - key_so_far.push(part); - } - bail!( - "expected table for configuration key `{}`, \ - but found {} in {}", - key_so_far, - val.desc(), - def - ) - } - } - } - Ok(Some(val.clone())) - } - - /// This is a helper for getting a CV from a file or env var. - pub(crate) fn get_cv_with_env(&self, key: &ConfigKey) -> CargoResult> { - // Determine if value comes from env, cli, or file, and merge env if - // possible. - let cv = self.get_cv(key)?; - if key.is_root() { - // Root table can't have env value. - return Ok(cv); - } - let env = self.env.get(key.as_env_key()); - let env_def = Definition::Environment(key.as_env_key().to_string()); - let use_env = match (&cv, env) { - // Lists are always merged. - (Some(CV::List(..)), Some(_)) => true, - (Some(cv), Some(_)) => env_def.is_higher_priority(cv.definition()), - (None, Some(_)) => true, - _ => false, - }; - - if !use_env { - return Ok(cv); - } - - // Future note: If you ever need to deserialize a non-self describing - // map type, this should implement a starts_with check (similar to how - // ConfigMapAccess does). - let env = env.unwrap(); - if env == "true" { - Ok(Some(CV::Boolean(true, env_def))) - } else if env == "false" { - Ok(Some(CV::Boolean(false, env_def))) - } else if let Ok(i) = env.parse::() { - Ok(Some(CV::Integer(i, env_def))) - } else if self.cli_unstable().advanced_env && env.starts_with('[') && env.ends_with(']') { - match cv { - Some(CV::List(mut cv_list, cv_def)) => { - // Merge with config file. - self.get_env_list(key, &mut cv_list)?; - Ok(Some(CV::List(cv_list, cv_def))) - } - Some(cv) => { - // This can't assume StringList or UnmergedStringList. - // Return an error, which is the behavior of merging - // multiple config.toml files with the same scenario. - bail!( - "unable to merge array env for config `{}`\n\ - file: {:?}\n\ - env: {}", - key, - cv, - env - ); - } - None => { - let mut cv_list = Vec::new(); - self.get_env_list(key, &mut cv_list)?; - Ok(Some(CV::List(cv_list, env_def))) - } - } - } else { - // Try to merge if possible. - match cv { - Some(CV::List(mut cv_list, cv_def)) => { - // Merge with config file. - self.get_env_list(key, &mut cv_list)?; - Ok(Some(CV::List(cv_list, cv_def))) - } - _ => { - // Note: CV::Table merging is not implemented, as env - // vars do not support table values. In the future, we - // could check for `{}`, and interpret it as TOML if - // that seems useful. - Ok(Some(CV::String(env.to_string(), env_def))) - } - } - } - } - - /// Helper primarily for testing. - pub fn set_env(&mut self, env: HashMap) { - self.env = env; - } - - /// Returns all environment variables. 
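// A rough sketch of how `get_cv_with_env` above interprets a raw environment string when
// no richer type information is available: booleans and integers are recognized, and
// everything else falls back to a string. (The bracketed-list form behind
// `-Z advanced-env` and the merge-with-file behaviour are left out of this sketch.)
#[derive(Debug, PartialEq)]
enum EnvValue {
    Boolean(bool),
    Integer(i64),
    Str(String),
}

fn sniff_env_value(raw: &str) -> EnvValue {
    if raw == "true" {
        EnvValue::Boolean(true)
    } else if raw == "false" {
        EnvValue::Boolean(false)
    } else if let Ok(i) = raw.parse::<i64>() {
        EnvValue::Integer(i)
    } else {
        EnvValue::Str(raw.to_string())
    }
}

fn main() {
    assert_eq!(sniff_env_value("true"), EnvValue::Boolean(true));
    assert_eq!(sniff_env_value("4"), EnvValue::Integer(4));
    assert_eq!(sniff_env_value("thin"), EnvValue::Str("thin".to_string()));
    println!("CARGO_* strings are sniffed into booleans, integers, or strings");
}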
- pub(crate) fn env(&self) -> &HashMap { - &self.env - } - - fn get_env(&self, key: &ConfigKey) -> Result, ConfigError> - where - T: FromStr, - ::Err: fmt::Display, - { - match self.env.get(key.as_env_key()) { - Some(value) => { - let definition = Definition::Environment(key.as_env_key().to_string()); - Ok(Some(Value { - val: value - .parse() - .map_err(|e| ConfigError::new(format!("{}", e), definition.clone()))?, - definition, - })) - } - None => { - self.check_environment_key_case_mismatch(key); - Ok(None) - } - } - } - - fn has_key(&self, key: &ConfigKey, env_prefix_ok: bool) -> bool { - if self.env.contains_key(key.as_env_key()) { - return true; - } - // See ConfigMapAccess for a description of this. - if env_prefix_ok { - let env_prefix = format!("{}_", key.as_env_key()); - if self.env.keys().any(|k| k.starts_with(&env_prefix)) { - return true; - } - } - if let Ok(o_cv) = self.get_cv(key) { - if o_cv.is_some() { - return true; - } - } - self.check_environment_key_case_mismatch(key); - - false - } - - fn check_environment_key_case_mismatch(&self, key: &ConfigKey) { - if let Some(env_key) = self.upper_case_env.get(key.as_env_key()) { - let _ = self.shell().warn(format!( - "Environment variables are expected to use uppercase letters and underscores, \ - the variable `{}` will be ignored and have no effect", - env_key - )); - } - } - - /// Get a string config value. - /// - /// See `get` for more details. - pub fn get_string(&self, key: &str) -> CargoResult> { - self.get::>>(key) - } - - /// Get a config value that is expected to be a path. - /// - /// This returns a relative path if the value does not contain any - /// directory separators. See `ConfigRelativePath::resolve_program` for - /// more details. - pub fn get_path(&self, key: &str) -> CargoResult> { - self.get::>>(key).map(|v| { - v.map(|v| Value { - val: v.val.resolve_program(self), - definition: v.definition, - }) - }) - } - - fn string_to_path(&self, value: &str, definition: &Definition) -> PathBuf { - let is_path = value.contains('/') || (cfg!(windows) && value.contains('\\')); - if is_path { - definition.root(self).join(value) - } else { - // A pathless name. - PathBuf::from(value) - } - } - - /// Get a list of strings. - /// - /// DO NOT USE outside of the config module. `pub` will be removed in the - /// future. - /// - /// NOTE: this does **not** support environment variables. Use `get` instead - /// if you want that. - pub fn get_list(&self, key: &str) -> CargoResult>> { - let key = ConfigKey::from_str(key); - self._get_list(&key) - } - - fn _get_list(&self, key: &ConfigKey) -> CargoResult>> { - match self.get_cv(key)? { - Some(CV::List(val, definition)) => Ok(Some(Value { val, definition })), - Some(val) => self.expected("list", key, &val), - None => Ok(None), - } - } - - /// Helper for StringList type to get something that is a string or list. - fn get_list_or_string( - &self, - key: &ConfigKey, - merge: bool, - ) -> CargoResult> { - let mut res = Vec::new(); - - if !merge { - self.get_env_list(key, &mut res)?; - - if !res.is_empty() { - return Ok(res); - } - } - - match self.get_cv(key)? { - Some(CV::List(val, _def)) => res.extend(val), - Some(CV::String(val, def)) => { - let split_vs = val.split_whitespace().map(|s| (s.to_string(), def.clone())); - res.extend(split_vs); - } - Some(val) => { - return self.expected("string or array of strings", key, &val); - } - None => {} - } - - self.get_env_list(key, &mut res)?; - - Ok(res) - } - - /// Internal method for getting an environment variable as a list. 
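// A minimal sketch of the `StringList` behaviour served by `get_list_or_string` above:
// a config value may be written either as a TOML array or as a single
// whitespace-separated string, and both forms produce the same list. The `RawValue`
// enum is a stand-in for the real `ConfigValue` variants.
enum RawValue {
    List(Vec<String>),
    Str(String),
}

fn to_string_list(raw: RawValue) -> Vec<String> {
    match raw {
        RawValue::List(items) => items,
        RawValue::Str(s) => s.split_whitespace().map(str::to_string).collect(),
    }
}

fn main() {
    let from_string = to_string_list(RawValue::Str("-C link-arg=-fuse-ld=lld".to_string()));
    let from_list = to_string_list(RawValue::List(vec![
        "-C".to_string(),
        "link-arg=-fuse-ld=lld".to_string(),
    ]));
    assert_eq!(from_string, from_list);
    println!("string and list forms are equivalent: {:?}", from_list);
}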
- fn get_env_list( - &self, - key: &ConfigKey, - output: &mut Vec<(String, Definition)>, - ) -> CargoResult<()> { - let env_val = match self.env.get(key.as_env_key()) { - Some(v) => v, - None => { - self.check_environment_key_case_mismatch(key); - return Ok(()); - } - }; - - let def = Definition::Environment(key.as_env_key().to_string()); - if self.cli_unstable().advanced_env && env_val.starts_with('[') && env_val.ends_with(']') { - // Parse an environment string as a TOML array. - let toml_s = format!("value={}", env_val); - let toml_v: toml::Value = toml::de::from_str(&toml_s).map_err(|e| { - ConfigError::new(format!("could not parse TOML list: {}", e), def.clone()) - })?; - let values = toml_v - .as_table() - .unwrap() - .get("value") - .unwrap() - .as_array() - .expect("env var was not array"); - for value in values { - // TODO: support other types. - let s = value.as_str().ok_or_else(|| { - ConfigError::new( - format!("expected string, found {}", value.type_str()), - def.clone(), - ) - })?; - output.push((s.to_string(), def.clone())); - } - } else { - output.extend( - env_val - .split_whitespace() - .map(|s| (s.to_string(), def.clone())), - ); - } - Ok(()) - } - - /// Low-level method for getting a config value as an `OptValue>`. - /// - /// NOTE: This does not read from env. The caller is responsible for that. - fn get_table(&self, key: &ConfigKey) -> CargoResult>> { - match self.get_cv(key)? { - Some(CV::Table(val, definition)) => Ok(Some(Value { val, definition })), - Some(val) => self.expected("table", key, &val), - None => Ok(None), - } - } - - get_value_typed! {get_integer, i64, Integer, "an integer"} - get_value_typed! {get_bool, bool, Boolean, "true/false"} - get_value_typed! {get_string_priv, String, String, "a string"} - - /// Generate an error when the given value is the wrong type. - fn expected(&self, ty: &str, key: &ConfigKey, val: &CV) -> CargoResult { - val.expected(ty, &key.to_string()) - .map_err(|e| anyhow!("invalid configuration for key `{}`\n{}", key, e)) - } - - /// Update the Config instance based on settings typically passed in on - /// the command-line. - /// - /// This may also load the config from disk if it hasn't already been - /// loaded. - pub fn configure( - &mut self, - verbose: u32, - quiet: bool, - color: Option<&str>, - frozen: bool, - locked: bool, - offline: bool, - target_dir: &Option, - unstable_flags: &[String], - cli_config: &[String], - ) -> CargoResult<()> { - for warning in self - .unstable_flags - .parse(unstable_flags, self.nightly_features_allowed)? - { - self.shell().warn(warning)?; - } - if !unstable_flags.is_empty() { - // store a copy of the cli flags separately for `load_unstable_flags_from_config` - // (we might also need it again for `reload_rooted_at`) - self.unstable_flags_cli = Some(unstable_flags.to_vec()); - } - if !cli_config.is_empty() { - self.unstable_flags.fail_if_stable_opt("--config", 6699)?; - self.cli_config = Some(cli_config.iter().map(|s| s.to_string()).collect()); - self.merge_cli_args()?; - } - if self.unstable_flags.config_include { - // If the config was already loaded (like when fetching the - // `[alias]` table), it was loaded with includes disabled because - // the `unstable_flags` hadn't been set up, yet. Any values - // fetched before this step will not process includes, but that - // should be fine (`[alias]` is one of the only things loaded - // before configure). This can be removed when stabilized. 
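// A sketch of the `-Z advanced-env` list parsing in `get_env_list` above: the raw
// environment string (e.g. `['a', 'b']`) is wrapped into `value=<raw>`, handed to the
// `toml` parser, and each element is then required to be a string. This assumes the
// `toml` crate as a dependency and simplifies the error type to a plain `String`.
fn parse_env_list(raw: &str) -> Result<Vec<String>, String> {
    let wrapped = format!("value={}", raw);
    let parsed: toml::Value =
        toml::from_str(&wrapped).map_err(|e| format!("could not parse TOML list: {}", e))?;
    let array = parsed
        .get("value")
        .and_then(|v| v.as_array())
        .ok_or_else(|| "env var was not an array".to_string())?;
    array
        .iter()
        .map(|v| {
            v.as_str()
                .map(str::to_string)
                .ok_or_else(|| format!("expected string, found {}", v.type_str()))
        })
        .collect()
}

fn main() {
    let list = parse_env_list("['a', 'b']").unwrap();
    assert_eq!(list, vec!["a".to_string(), "b".to_string()]);
    println!("parsed advanced-env list: {:?}", list);
}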
- self.reload_rooted_at(self.cwd.clone())?; - } - let extra_verbose = verbose >= 2; - let verbose = verbose != 0; - - // Ignore errors in the configuration files. We don't want basic - // commands like `cargo version` to error out due to config file - // problems. - let term = self.get::("term").unwrap_or_default(); - - let color = color.or_else(|| term.color.as_deref()); - - // The command line takes precedence over configuration. - let verbosity = match (verbose, quiet) { - (true, true) => bail!("cannot set both --verbose and --quiet"), - (true, false) => Verbosity::Verbose, - (false, true) => Verbosity::Quiet, - (false, false) => match (term.verbose, term.quiet) { - (Some(true), Some(true)) => { - bail!("cannot set both `term.verbose` and `term.quiet`") - } - (Some(true), Some(false)) => Verbosity::Verbose, - (Some(false), Some(true)) => Verbosity::Quiet, - _ => Verbosity::Normal, - }, - }; - - let cli_target_dir = target_dir.as_ref().map(|dir| Filesystem::new(dir.clone())); - - self.shell().set_verbosity(verbosity); - self.shell().set_color_choice(color)?; - self.progress_config = term.progress.unwrap_or_default(); - self.extra_verbose = extra_verbose; - self.frozen = frozen; - self.locked = locked; - self.offline = offline - || self - .net_config() - .ok() - .and_then(|n| n.offline) - .unwrap_or(false); - self.target_dir = cli_target_dir; - - self.load_unstable_flags_from_config()?; - - Ok(()) - } - - fn load_unstable_flags_from_config(&mut self) -> CargoResult<()> { - // If nightly features are enabled, allow setting Z-flags from config - // using the `unstable` table. Ignore that block otherwise. - if self.nightly_features_allowed { - self.unstable_flags = self - .get::>("unstable")? - .unwrap_or_default(); - if let Some(unstable_flags_cli) = &self.unstable_flags_cli { - // NB. It's not ideal to parse these twice, but doing it again here - // allows the CLI to override config files for both enabling - // and disabling, and doing it up top allows CLI Zflags to - // control config parsing behavior. - self.unstable_flags.parse(unstable_flags_cli, true)?; - } - } - - Ok(()) - } - - pub fn cli_unstable(&self) -> &CliUnstable { - &self.unstable_flags - } - - pub fn extra_verbose(&self) -> bool { - self.extra_verbose - } - - pub fn network_allowed(&self) -> bool { - !self.frozen() && !self.offline() - } - - pub fn offline(&self) -> bool { - self.offline - } - - pub fn frozen(&self) -> bool { - self.frozen - } - - pub fn locked(&self) -> bool { - self.locked - } - - pub fn lock_update_allowed(&self) -> bool { - !self.frozen && !self.locked - } - - /// Loads configuration from the filesystem. 
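// A compact sketch of the verbosity resolution in `configure` above: the command-line
// flags win outright, and the `[term]` table is only consulted when neither `--verbose`
// nor `--quiet` was passed. The inner match mirrors the original arm-for-arm; the error
// type is simplified to `String` here.
#[derive(Debug, PartialEq)]
enum Verbosity {
    Verbose,
    Normal,
    Quiet,
}

fn resolve_verbosity(
    cli_verbose: bool,
    cli_quiet: bool,
    term_verbose: Option<bool>,
    term_quiet: Option<bool>,
) -> Result<Verbosity, String> {
    match (cli_verbose, cli_quiet) {
        (true, true) => Err("cannot set both --verbose and --quiet".to_string()),
        (true, false) => Ok(Verbosity::Verbose),
        (false, true) => Ok(Verbosity::Quiet),
        (false, false) => match (term_verbose, term_quiet) {
            (Some(true), Some(true)) => {
                Err("cannot set both `term.verbose` and `term.quiet`".to_string())
            }
            (Some(true), Some(false)) => Ok(Verbosity::Verbose),
            (Some(false), Some(true)) => Ok(Verbosity::Quiet),
            _ => Ok(Verbosity::Normal),
        },
    }
}

fn main() {
    assert_eq!(
        resolve_verbosity(true, false, Some(false), Some(true)),
        Ok(Verbosity::Verbose) // CLI flag overrides [term] settings
    );
    assert_eq!(
        resolve_verbosity(false, false, Some(false), Some(true)),
        Ok(Verbosity::Quiet)
    );
    println!("command-line flags take precedence over [term] settings");
}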
- pub fn load_values(&self) -> CargoResult> { - self.load_values_from(&self.cwd) - } - - pub(crate) fn load_values_unmerged(&self) -> CargoResult> { - let mut result = Vec::new(); - let mut seen = HashSet::new(); - let home = self.home_path.clone().into_path_unlocked(); - self.walk_tree(&self.cwd, &home, |path| { - let mut cv = self._load_file(path, &mut seen, false)?; - if self.cli_unstable().config_include { - self.load_unmerged_include(&mut cv, &mut seen, &mut result)?; - } - result.push(cv); - Ok(()) - }) - .with_context(|| "could not load Cargo configuration")?; - Ok(result) - } - - fn load_unmerged_include( - &self, - cv: &mut CV, - seen: &mut HashSet, - output: &mut Vec, - ) -> CargoResult<()> { - let includes = self.include_paths(cv, false)?; - for (path, abs_path, def) in includes { - let mut cv = self._load_file(&abs_path, seen, false).with_context(|| { - format!("failed to load config include `{}` from `{}`", path, def) - })?; - self.load_unmerged_include(&mut cv, seen, output)?; - output.push(cv); - } - Ok(()) - } - - fn load_values_from(&self, path: &Path) -> CargoResult> { - // This definition path is ignored, this is just a temporary container - // representing the entire file. - let mut cfg = CV::Table(HashMap::new(), Definition::Path(PathBuf::from("."))); - let home = self.home_path.clone().into_path_unlocked(); - - self.walk_tree(path, &home, |path| { - let value = self.load_file(path, true)?; - cfg.merge(value, false).with_context(|| { - format!("failed to merge configuration at `{}`", path.display()) - })?; - Ok(()) - }) - .with_context(|| "could not load Cargo configuration")?; - - match cfg { - CV::Table(map, _) => Ok(map), - _ => unreachable!(), - } - } - - fn load_file(&self, path: &Path, includes: bool) -> CargoResult { - let mut seen = HashSet::new(); - self._load_file(path, &mut seen, includes) - } - - fn _load_file( - &self, - path: &Path, - seen: &mut HashSet, - includes: bool, - ) -> CargoResult { - if !seen.insert(path.to_path_buf()) { - bail!( - "config `include` cycle detected with path `{}`", - path.display() - ); - } - let contents = fs::read_to_string(path) - .with_context(|| format!("failed to read configuration file `{}`", path.display()))?; - let toml = cargo_toml::parse(&contents, path, self).with_context(|| { - format!("could not parse TOML configuration in `{}`", path.display()) - })?; - let value = - CV::from_toml(Definition::Path(path.to_path_buf()), toml).with_context(|| { - format!( - "failed to load TOML configuration from `{}`", - path.display() - ) - })?; - if includes { - self.load_includes(value, seen) - } else { - Ok(value) - } - } - - /// Load any `include` files listed in the given `value`. - /// - /// Returns `value` with the given include files merged into it. - /// - /// `seen` is used to check for cyclic includes. - fn load_includes(&self, mut value: CV, seen: &mut HashSet) -> CargoResult { - // Get the list of files to load. - let includes = self.include_paths(&mut value, true)?; - // Check unstable. - if !self.cli_unstable().config_include { - return Ok(value); - } - // Accumulate all values here. - let mut root = CV::Table(HashMap::new(), value.definition().clone()); - for (path, abs_path, def) in includes { - self._load_file(&abs_path, seen, true) - .and_then(|include| root.merge(include, true)) - .with_context(|| { - format!("failed to load config include `{}` from `{}`", path, def) - })?; - } - root.merge(value, true)?; - Ok(root) - } - - /// Converts the `include` config value to a list of absolute paths. 
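use std::collections::{HashMap, HashSet};
use std::path::PathBuf;

// A toy model of the cycle check in `_load_file` above: every config file is recorded in
// a `seen` set before its `include` entries are followed, so a file that (directly or
// indirectly) includes itself is reported instead of recursing forever. The in-memory
// `files` map stands in for the real filesystem and TOML parsing.
fn load(
    path: &PathBuf,
    files: &HashMap<PathBuf, Vec<PathBuf>>,
    seen: &mut HashSet<PathBuf>,
) -> Result<(), String> {
    if !seen.insert(path.clone()) {
        return Err(format!(
            "config `include` cycle detected with path `{}`",
            path.display()
        ));
    }
    for include in files.get(path).into_iter().flatten() {
        load(include, files, seen)?;
    }
    Ok(())
}

fn main() {
    let a = PathBuf::from("a.toml");
    let b = PathBuf::from("b.toml");
    let mut files = HashMap::new();
    files.insert(a.clone(), vec![b.clone()]);
    files.insert(b.clone(), vec![a.clone()]); // b includes a again: a cycle
    let err = load(&a, &files, &mut HashSet::new()).unwrap_err();
    println!("{}", err);
}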
- fn include_paths( - &self, - cv: &mut CV, - remove: bool, - ) -> CargoResult> { - let abs = |path: &String, def: &Definition| -> (String, PathBuf, Definition) { - let abs_path = match def { - Definition::Path(p) => p.parent().unwrap().join(&path), - Definition::Environment(_) | Definition::Cli => self.cwd().join(&path), - }; - (path.to_string(), abs_path, def.clone()) - }; - let table = match cv { - CV::Table(table, _def) => table, - _ => unreachable!(), - }; - let owned; - let include = if remove { - owned = table.remove("include"); - owned.as_ref() - } else { - table.get("include") - }; - let includes = match include { - Some(CV::String(s, def)) => { - vec![abs(s, def)] - } - Some(CV::List(list, _def)) => list.iter().map(|(s, def)| abs(s, def)).collect(), - Some(other) => bail!( - "`include` expected a string or list, but found {} in `{}`", - other.desc(), - other.definition() - ), - None => { - return Ok(Vec::new()); - } - }; - Ok(includes) - } - - /// Parses the CLI config args and returns them as a table. - pub(crate) fn cli_args_as_table(&self) -> CargoResult { - let mut loaded_args = CV::Table(HashMap::new(), Definition::Cli); - let cli_args = match &self.cli_config { - Some(cli_args) => cli_args, - None => return Ok(loaded_args), - }; - for arg in cli_args { - let arg_as_path = self.cwd.join(arg); - let tmp_table = if !arg.is_empty() && arg_as_path.exists() { - // --config path_to_file - let str_path = arg_as_path - .to_str() - .ok_or_else(|| { - anyhow::format_err!("config path {:?} is not utf-8", arg_as_path) - })? - .to_string(); - let mut map = HashMap::new(); - let value = CV::String(str_path, Definition::Cli); - map.insert("include".to_string(), value); - CV::Table(map, Definition::Cli) - } else { - // TODO: This should probably use a more narrow parser, reject - // comments, blank lines, [headers], etc. - let toml_v: toml::Value = toml::de::from_str(arg) - .with_context(|| format!("failed to parse --config argument `{}`", arg))?; - let toml_table = toml_v.as_table().unwrap(); - if toml_table.len() != 1 { - bail!( - "--config argument `{}` expected exactly one key=value pair, got {} keys", - arg, - toml_table.len() - ); - } - CV::from_toml(Definition::Cli, toml_v) - .with_context(|| format!("failed to convert --config argument `{}`", arg))? - }; - let mut seen = HashSet::new(); - let tmp_table = self - .load_includes(tmp_table, &mut seen) - .with_context(|| "failed to load --config include".to_string())?; - loaded_args - .merge(tmp_table, true) - .with_context(|| format!("failed to merge --config argument `{}`", arg))?; - } - Ok(loaded_args) - } - - /// Add config arguments passed on the command line. - fn merge_cli_args(&mut self) -> CargoResult<()> { - let loaded_map = match self.cli_args_as_table()? { - CV::Table(table, _def) => table, - _ => unreachable!(), - }; - // Force values to be loaded. - let _ = self.values()?; - let values = self.values_mut()?; - for (key, value) in loaded_map.into_iter() { - match values.entry(key) { - Vacant(entry) => { - entry.insert(value); - } - Occupied(mut entry) => entry.get_mut().merge(value, true).with_context(|| { - format!( - "failed to merge --config key `{}` into `{}`", - entry.key(), - entry.get().definition(), - ) - })?, - }; - } - Ok(()) - } - - /// The purpose of this function is to aid in the transition to using - /// .toml extensions on Cargo's config files, which were historically not used. - /// Both 'config.toml' and 'credentials.toml' should be valid with or without extension. 
- /// When both exist, we want to prefer the one without an extension for - /// backwards compatibility, but warn the user appropriately. - fn get_file_path( - &self, - dir: &Path, - filename_without_extension: &str, - warn: bool, - ) -> CargoResult> { - let possible = dir.join(filename_without_extension); - let possible_with_extension = dir.join(format!("{}.toml", filename_without_extension)); - - if possible.exists() { - if warn && possible_with_extension.exists() { - // We don't want to print a warning if the version - // without the extension is just a symlink to the version - // WITH an extension, which people may want to do to - // support multiple Cargo versions at once and not - // get a warning. - let skip_warning = if let Ok(target_path) = fs::read_link(&possible) { - target_path == possible_with_extension - } else { - false - }; - - if !skip_warning { - self.shell().warn(format!( - "Both `{}` and `{}` exist. Using `{}`", - possible.display(), - possible_with_extension.display(), - possible.display() - ))?; - } - } - - Ok(Some(possible)) - } else if possible_with_extension.exists() { - Ok(Some(possible_with_extension)) - } else { - Ok(None) - } - } - - fn walk_tree(&self, pwd: &Path, home: &Path, mut walk: F) -> CargoResult<()> - where - F: FnMut(&Path) -> CargoResult<()>, - { - let mut stash: HashSet = HashSet::new(); - - for current in paths::ancestors(pwd, self.search_stop_path.as_deref()) { - if let Some(path) = self.get_file_path(¤t.join(".cargo"), "config", true)? { - walk(&path)?; - stash.insert(path); - } - } - - // Once we're done, also be sure to walk the home directory even if it's not - // in our history to be sure we pick up that standard location for - // information. - if let Some(path) = self.get_file_path(home, "config", true)? { - if !stash.contains(&path) { - walk(&path)?; - } - } - - Ok(()) - } - - /// Gets the index for a registry. - pub fn get_registry_index(&self, registry: &str) -> CargoResult { - validate_package_name(registry, "registry name", "")?; - if let Some(index) = self.get_string(&format!("registries.{}.index", registry))? { - self.resolve_registry_index(&index).with_context(|| { - format!( - "invalid index URL for registry `{}` defined in {}", - registry, index.definition - ) - }) - } else { - bail!("no index found for registry: `{}`", registry); - } - } - - /// Returns an error if `registry.index` is set. - pub fn check_registry_index_not_set(&self) -> CargoResult<()> { - if self.get_string("registry.index")?.is_some() { - bail!( - "the `registry.index` config value is no longer supported\n\ - Use `[source]` replacement to alter the default index for crates.io." - ); - } - Ok(()) - } - - fn resolve_registry_index(&self, index: &Value) -> CargoResult { - // This handles relative file: URLs, relative to the config definition. - let base = index - .definition - .root(self) - .join("truncated-by-url_with_base"); - // Parse val to check it is a URL, not a relative path without a protocol. - let _parsed = index.val.into_url()?; - let url = index.val.into_url_with_base(Some(&*base))?; - if url.password().is_some() { - bail!("registry URLs may not contain passwords"); - } - Ok(url) - } - - /// Loads credentials config from the credentials file, if present. - pub fn load_credentials(&mut self) -> CargoResult<()> { - let home_path = self.home_path.clone().into_path_unlocked(); - let credentials = match self.get_file_path(&home_path, "credentials", true)? 
{ - Some(credentials) => credentials, - None => return Ok(()), - }; - - let mut value = self.load_file(&credentials, true)?; - // Backwards compatibility for old `.cargo/credentials` layout. - { - let (value_map, def) = match value { - CV::Table(ref mut value, ref def) => (value, def), - _ => unreachable!(), - }; - - if let Some(token) = value_map.remove("token") { - if let Vacant(entry) = value_map.entry("registry".into()) { - let mut map = HashMap::new(); - map.insert("token".into(), token); - let table = CV::Table(map, def.clone()); - entry.insert(table); - } - } - } - - if let CV::Table(map, _) = value { - let base_map = self.values_mut()?; - for (k, v) in map { - match base_map.entry(k) { - Vacant(entry) => { - entry.insert(v); - } - Occupied(mut entry) => { - entry.get_mut().merge(v, true)?; - } - } - } - } - - Ok(()) - } - - /// Looks for a path for `tool` in an environment variable or the given config, and returns - /// `None` if it's not present. - fn maybe_get_tool( - &self, - tool: &str, - from_config: &Option, - ) -> Option { - let var = tool.to_uppercase(); - - match env::var_os(&var) { - Some(tool_path) => { - let maybe_relative = match tool_path.to_str() { - Some(s) => s.contains('/') || s.contains('\\'), - None => false, - }; - let path = if maybe_relative { - self.cwd.join(tool_path) - } else { - PathBuf::from(tool_path) - }; - Some(path) - } - - None => from_config.as_ref().map(|p| p.resolve_program(self)), - } - } - - /// Looks for a path for `tool` in an environment variable or config path, defaulting to `tool` - /// as a path. - fn get_tool(&self, tool: &str, from_config: &Option) -> PathBuf { - self.maybe_get_tool(tool, from_config) - .unwrap_or_else(|| PathBuf::from(tool)) - } - - pub fn jobserver_from_env(&self) -> Option<&jobserver::Client> { - self.jobserver.as_ref() - } - - pub fn http(&self) -> CargoResult<&RefCell> { - let http = self - .easy - .try_borrow_with(|| ops::http_handle(self).map(RefCell::new))?; - { - let mut http = http.borrow_mut(); - http.reset(); - let timeout = ops::configure_http_handle(self, &mut http)?; - timeout.configure(&mut http)?; - } - Ok(http) - } - - pub fn http_config(&self) -> CargoResult<&CargoHttpConfig> { - self.http_config - .try_borrow_with(|| self.get::("http")) - } - - pub fn future_incompat_config(&self) -> CargoResult<&CargoFutureIncompatConfig> { - self.future_incompat_config - .try_borrow_with(|| self.get::("future-incompat-report")) - } - - pub fn net_config(&self) -> CargoResult<&CargoNetConfig> { - self.net_config - .try_borrow_with(|| self.get::("net")) - } - - pub fn build_config(&self) -> CargoResult<&CargoBuildConfig> { - self.build_config - .try_borrow_with(|| self.get::("build")) - } - - pub fn progress_config(&self) -> &ProgressConfig { - &self.progress_config - } - - pub fn env_config(&self) -> CargoResult<&EnvConfig> { - self.env_config - .try_borrow_with(|| self.get::("env")) - } - - /// This is used to validate the `term` table has valid syntax. - /// - /// This is necessary because loading the term settings happens very - /// early, and in some situations (like `cargo version`) we don't want to - /// fail if there are problems with the config file. - pub fn validate_term_config(&self) -> CargoResult<()> { - drop(self.get::("term")?); - Ok(()) - } - - /// Returns a list of [target.'cfg()'] tables. - /// - /// The list is sorted by the table name. 
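use std::path::{Path, PathBuf};

// A trimmed-down sketch of the lookup rule implemented by `get_file_path` above: the
// historical extension-less `config` file is preferred when both it and `config.toml`
// exist (with a warning, unless one is a symlink to the other in the real code);
// otherwise whichever one exists is used.
fn pick_config_file(dir: &Path) -> Option<PathBuf> {
    let legacy = dir.join("config");
    let with_extension = dir.join("config.toml");
    if legacy.exists() {
        if with_extension.exists() {
            eprintln!(
                "warning: both `{}` and `{}` exist, using `{}`",
                legacy.display(),
                with_extension.display(),
                legacy.display()
            );
        }
        Some(legacy)
    } else if with_extension.exists() {
        Some(with_extension)
    } else {
        None
    }
}

fn main() {
    match pick_config_file(Path::new(".cargo")) {
        Some(path) => println!("would load {}", path.display()),
        None => println!("no config file in ./.cargo"),
    }
}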
- pub fn target_cfgs(&self) -> CargoResult<&Vec<(String, TargetCfgConfig)>> { - self.target_cfgs - .try_borrow_with(|| target::load_target_cfgs(self)) - } - - pub fn doc_extern_map(&self) -> CargoResult<&RustdocExternMap> { - // Note: This does not support environment variables. The `Unit` - // fundamentally does not have access to the registry name, so there is - // nothing to query. Plumbing the name into SourceId is quite challenging. - self.doc_extern_map - .try_borrow_with(|| self.get::("doc.extern-map")) - } - - /// Returns true if the `[target]` table should be applied to host targets. - pub fn target_applies_to_host(&self) -> CargoResult { - target::get_target_applies_to_host(self) - } - - /// Returns the `[host]` table definition for the given target triple. - pub fn host_cfg_triple(&self, target: &str) -> CargoResult { - target::load_host_triple(self, target) - } - - /// Returns the `[target]` table definition for the given target triple. - pub fn target_cfg_triple(&self, target: &str) -> CargoResult { - target::load_target_triple(self, target) - } - - pub fn crates_io_source_id(&self, f: F) -> CargoResult - where - F: FnMut() -> CargoResult, - { - Ok(*(self.crates_io_source_id.try_borrow_with(f)?)) - } - - pub fn creation_time(&self) -> Instant { - self.creation_time - } - - /// Retrieves a config variable. - /// - /// This supports most serde `Deserialize` types. Examples: - /// - /// ```rust,ignore - /// let v: Option = config.get("some.nested.key")?; - /// let v: Option = config.get("some.key")?; - /// let v: Option> = config.get("foo")?; - /// ``` - /// - /// The key may be a dotted key, but this does NOT support TOML key - /// quoting. Avoid key components that may have dots. For example, - /// `foo.'a.b'.bar" does not work if you try to fetch `foo.'a.b'". You can - /// fetch `foo` if it is a map, though. - pub fn get<'de, T: serde::de::Deserialize<'de>>(&self, key: &str) -> CargoResult { - let d = Deserializer { - config: self, - key: ConfigKey::from_str(key), - env_prefix_ok: true, - }; - T::deserialize(d).map_err(|e| e.into()) - } - - pub fn assert_package_cache_locked<'a>(&self, f: &'a Filesystem) -> &'a Path { - let ret = f.as_path_unlocked(); - assert!( - self.package_cache_lock.borrow().is_some(), - "package cache lock is not currently held, Cargo forgot to call \ - `acquire_package_cache_lock` before we got to this stack frame", - ); - assert!(ret.starts_with(self.home_path.as_path_unlocked())); - ret - } - - /// Acquires an exclusive lock on the global "package cache" - /// - /// This lock is global per-process and can be acquired recursively. An RAII - /// structure is returned to release the lock, and if this process - /// abnormally terminates the lock is also released. - pub fn acquire_package_cache_lock(&self) -> CargoResult> { - let mut slot = self.package_cache_lock.borrow_mut(); - match *slot { - // We've already acquired the lock in this process, so simply bump - // the count and continue. - Some((_, ref mut cnt)) => { - *cnt += 1; - } - None => { - let path = ".package-cache"; - let desc = "package cache"; - - // First, attempt to open an exclusive lock which is in general - // the purpose of this lock! - // - // If that fails because of a readonly filesystem or a - // permission error, though, then we don't really want to fail - // just because of this. All files that this lock protects are - // in subfolders, so they're assumed by Cargo to also be - // readonly or have invalid permissions for us to write to. 
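use serde::Deserialize;

// A self-contained sketch of the kind of typed lookup `Config::get` above provides:
// serde deserializes a config table straight into a caller-defined struct. Here a TOML
// snippet stands in for the merged file/environment sources that the real deserializer
// in `de.rs` draws from; `serde` and `toml` are assumed as dependencies, and the
// `TermConfig` fields below are illustrative, not cargo's exact schema.
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct TermConfig {
    verbose: Option<bool>,
    quiet: Option<bool>,
    progress: Option<String>,
}

fn main() -> Result<(), toml::de::Error> {
    let raw = "verbose = true\nprogress = 'auto'";
    let term: TermConfig = toml::from_str(raw)?;
    println!("term settings: {:?}", term);
    Ok(())
}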
If - // that's the case, then we don't really need to grab a lock in - // the first place here. - // - // Despite this we attempt to grab a readonly lock. This means - // that if our read-only folder is shared read-write with - // someone else on the system we should synchronize with them, - // but if we can't even do that then we did our best and we just - // keep on chugging elsewhere. - match self.home_path.open_rw(path, self, desc) { - Ok(lock) => *slot = Some((Some(lock), 1)), - Err(e) => { - if maybe_readonly(&e) { - let lock = self.home_path.open_ro(path, self, desc).ok(); - *slot = Some((lock, 1)); - return Ok(PackageCacheLock(self)); - } - - Err(e).with_context(|| "failed to acquire package cache lock")?; - } - } - } - } - return Ok(PackageCacheLock(self)); - - fn maybe_readonly(err: &anyhow::Error) -> bool { - err.chain().any(|err| { - if let Some(io) = err.downcast_ref::() { - if io.kind() == io::ErrorKind::PermissionDenied { - return true; - } - - #[cfg(unix)] - return io.raw_os_error() == Some(libc::EROFS); - } - - false - }) - } - } - - pub fn release_package_cache_lock(&self) {} -} - -/// Internal error for serde errors. -#[derive(Debug)] -pub struct ConfigError { - error: anyhow::Error, - definition: Option, -} - -impl ConfigError { - fn new(message: String, definition: Definition) -> ConfigError { - ConfigError { - error: anyhow::Error::msg(message), - definition: Some(definition), - } - } - - fn expected(key: &ConfigKey, expected: &str, found: &ConfigValue) -> ConfigError { - ConfigError { - error: anyhow!( - "`{}` expected {}, but found a {}", - key, - expected, - found.desc() - ), - definition: Some(found.definition().clone()), - } - } - - fn missing(key: &ConfigKey) -> ConfigError { - ConfigError { - error: anyhow!("missing config key `{}`", key), - definition: None, - } - } - - fn with_key_context(self, key: &ConfigKey, definition: Definition) -> ConfigError { - ConfigError { - error: anyhow::Error::from(self) - .context(format!("could not load config key `{}`", key)), - definition: Some(definition), - } - } -} - -impl std::error::Error for ConfigError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - self.error.source() - } -} - -impl fmt::Display for ConfigError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Some(definition) = &self.definition { - write!(f, "error in {}: {}", definition, self.error) - } else { - self.error.fmt(f) - } - } -} - -impl serde::de::Error for ConfigError { - fn custom(msg: T) -> Self { - ConfigError { - error: anyhow::Error::msg(msg.to_string()), - definition: None, - } - } -} - -impl From for ConfigError { - fn from(error: anyhow::Error) -> Self { - ConfigError { - error, - definition: None, - } - } -} - -#[derive(Eq, PartialEq, Clone)] -pub enum ConfigValue { - Integer(i64, Definition), - String(String, Definition), - List(Vec<(String, Definition)>, Definition), - Table(HashMap, Definition), - Boolean(bool, Definition), -} - -impl fmt::Debug for ConfigValue { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - CV::Integer(i, def) => write!(f, "{} (from {})", i, def), - CV::Boolean(b, def) => write!(f, "{} (from {})", b, def), - CV::String(s, def) => write!(f, "{} (from {})", s, def), - CV::List(list, def) => { - write!(f, "[")?; - for (i, (s, def)) in list.iter().enumerate() { - if i > 0 { - write!(f, ", ")?; - } - write!(f, "{} (from {})", s, def)?; - } - write!(f, "] (from {})", def) - } - CV::Table(table, _) => write!(f, "{:?}", table), - } - } -} - -impl 
ConfigValue { - fn from_toml(def: Definition, toml: toml::Value) -> CargoResult { - match toml { - toml::Value::String(val) => Ok(CV::String(val, def)), - toml::Value::Boolean(b) => Ok(CV::Boolean(b, def)), - toml::Value::Integer(i) => Ok(CV::Integer(i, def)), - toml::Value::Array(val) => Ok(CV::List( - val.into_iter() - .map(|toml| match toml { - toml::Value::String(val) => Ok((val, def.clone())), - v => bail!("expected string but found {} in list", v.type_str()), - }) - .collect::>()?, - def, - )), - toml::Value::Table(val) => Ok(CV::Table( - val.into_iter() - .map(|(key, value)| { - let value = CV::from_toml(def.clone(), value) - .with_context(|| format!("failed to parse key `{}`", key))?; - Ok((key, value)) - }) - .collect::>()?, - def, - )), - v => bail!( - "found TOML configuration value of unknown type `{}`", - v.type_str() - ), - } - } - - fn into_toml(self) -> toml::Value { - match self { - CV::Boolean(s, _) => toml::Value::Boolean(s), - CV::String(s, _) => toml::Value::String(s), - CV::Integer(i, _) => toml::Value::Integer(i), - CV::List(l, _) => { - toml::Value::Array(l.into_iter().map(|(s, _)| toml::Value::String(s)).collect()) - } - CV::Table(l, _) => { - toml::Value::Table(l.into_iter().map(|(k, v)| (k, v.into_toml())).collect()) - } - } - } - - /// Merge the given value into self. - /// - /// If `force` is true, primitive (non-container) types will override existing values. - /// If false, the original will be kept and the new value ignored. - /// - /// Container types (tables and arrays) are merged with existing values. - /// - /// Container and non-container types cannot be mixed. - fn merge(&mut self, from: ConfigValue, force: bool) -> CargoResult<()> { - match (self, from) { - (&mut CV::List(ref mut old, _), CV::List(ref mut new, _)) => { - old.extend(mem::take(new).into_iter()); - } - (&mut CV::Table(ref mut old, _), CV::Table(ref mut new, _)) => { - for (key, value) in mem::take(new) { - match old.entry(key.clone()) { - Occupied(mut entry) => { - let new_def = value.definition().clone(); - let entry = entry.get_mut(); - entry.merge(value, force).with_context(|| { - format!( - "failed to merge key `{}` between \ - {} and {}", - key, - entry.definition(), - new_def, - ) - })?; - } - Vacant(entry) => { - entry.insert(value); - } - }; - } - } - // Allow switching types except for tables or arrays. 
- (expected @ &mut CV::List(_, _), found) - | (expected @ &mut CV::Table(_, _), found) - | (expected, found @ CV::List(_, _)) - | (expected, found @ CV::Table(_, _)) => { - return Err(anyhow!( - "failed to merge config value from `{}` into `{}`: expected {}, but found {}", - found.definition(), - expected.definition(), - expected.desc(), - found.desc() - )); - } - (old, mut new) => { - if force || new.definition().is_higher_priority(old.definition()) { - mem::swap(old, &mut new); - } - } - } - - Ok(()) - } - - pub fn i64(&self, key: &str) -> CargoResult<(i64, &Definition)> { - match self { - CV::Integer(i, def) => Ok((*i, def)), - _ => self.expected("integer", key), - } - } - - pub fn string(&self, key: &str) -> CargoResult<(&str, &Definition)> { - match self { - CV::String(s, def) => Ok((s, def)), - _ => self.expected("string", key), - } - } - - pub fn table(&self, key: &str) -> CargoResult<(&HashMap, &Definition)> { - match self { - CV::Table(table, def) => Ok((table, def)), - _ => self.expected("table", key), - } - } - - pub fn list(&self, key: &str) -> CargoResult<&[(String, Definition)]> { - match self { - CV::List(list, _) => Ok(list), - _ => self.expected("list", key), - } - } - - pub fn boolean(&self, key: &str) -> CargoResult<(bool, &Definition)> { - match self { - CV::Boolean(b, def) => Ok((*b, def)), - _ => self.expected("bool", key), - } - } - - pub fn desc(&self) -> &'static str { - match *self { - CV::Table(..) => "table", - CV::List(..) => "array", - CV::String(..) => "string", - CV::Boolean(..) => "boolean", - CV::Integer(..) => "integer", - } - } - - pub fn definition(&self) -> &Definition { - match self { - CV::Boolean(_, def) - | CV::Integer(_, def) - | CV::String(_, def) - | CV::List(_, def) - | CV::Table(_, def) => def, - } - } - - fn expected(&self, wanted: &str, key: &str) -> CargoResult { - bail!( - "expected a {}, but found a {} for `{}` in {}", - wanted, - self.desc(), - key, - self.definition() - ) - } -} - -pub fn homedir(cwd: &Path) -> Option { - ::home::cargo_home_with_cwd(cwd).ok() -} - -pub fn save_credentials( - cfg: &Config, - token: Option, - registry: Option<&str>, -) -> CargoResult<()> { - // If 'credentials.toml' exists, we should write to that, otherwise - // use the legacy 'credentials'. There's no need to print the warning - // here, because it would already be printed at load time. - let home_path = cfg.home_path.clone().into_path_unlocked(); - let filename = match cfg.get_file_path(&home_path, "credentials", false)? { - Some(path) => match path.file_name() { - Some(filename) => Path::new(filename).to_owned(), - None => Path::new("credentials").to_owned(), - }, - None => Path::new("credentials").to_owned(), - }; - - let mut file = { - cfg.home_path.create_dir()?; - cfg.home_path - .open_rw(filename, cfg, "credentials' config file")? - }; - - let mut contents = String::new(); - file.read_to_string(&mut contents).with_context(|| { - format!( - "failed to read configuration file `{}`", - file.path().display() - ) - })?; - - let mut toml = cargo_toml::parse(&contents, file.path(), cfg)?; - - // Move the old token location to the new one. 
- if let Some(token) = toml.as_table_mut().unwrap().remove("token") { - let mut map = HashMap::new(); - map.insert("token".to_string(), token); - toml.as_table_mut() - .unwrap() - .insert("registry".into(), map.into()); - } - - if let Some(token) = token { - // login - let (key, mut value) = { - let key = "token".to_string(); - let value = ConfigValue::String(token, Definition::Path(file.path().to_path_buf())); - let mut map = HashMap::new(); - map.insert(key, value); - let table = CV::Table(map, Definition::Path(file.path().to_path_buf())); - - if let Some(registry) = registry { - let mut map = HashMap::new(); - map.insert(registry.to_string(), table); - ( - "registries".into(), - CV::Table(map, Definition::Path(file.path().to_path_buf())), - ) - } else { - ("registry".into(), table) - } - }; - - if registry.is_some() { - if let Some(table) = toml.as_table_mut().unwrap().remove("registries") { - let v = CV::from_toml(Definition::Path(file.path().to_path_buf()), table)?; - value.merge(v, false)?; - } - } - toml.as_table_mut().unwrap().insert(key, value.into_toml()); - } else { - // logout - let table = toml.as_table_mut().unwrap(); - if let Some(registry) = registry { - if let Some(registries) = table.get_mut("registries") { - if let Some(reg) = registries.get_mut(registry) { - let rtable = reg.as_table_mut().ok_or_else(|| { - format_err!("expected `[registries.{}]` to be a table", registry) - })?; - rtable.remove("token"); - } - } - } else if let Some(registry) = table.get_mut("registry") { - let reg_table = registry - .as_table_mut() - .ok_or_else(|| format_err!("expected `[registry]` to be a table"))?; - reg_table.remove("token"); - } - } - - let contents = toml.to_string(); - file.seek(SeekFrom::Start(0))?; - file.write_all(contents.as_bytes()) - .with_context(|| format!("failed to write to `{}`", file.path().display()))?; - file.file().set_len(contents.len() as u64)?; - set_permissions(file.file(), 0o600) - .with_context(|| format!("failed to set permissions of `{}`", file.path().display()))?; - - return Ok(()); - - #[cfg(unix)] - fn set_permissions(file: &File, mode: u32) -> CargoResult<()> { - use std::os::unix::fs::PermissionsExt; - - let mut perms = file.metadata()?.permissions(); - perms.set_mode(mode); - file.set_permissions(perms)?; - Ok(()) - } - - #[cfg(not(unix))] - #[allow(unused)] - fn set_permissions(file: &File, mode: u32) -> CargoResult<()> { - Ok(()) - } -} - -pub struct PackageCacheLock<'a>(&'a Config); - -impl Drop for PackageCacheLock<'_> { - fn drop(&mut self) { - let mut slot = self.0.package_cache_lock.borrow_mut(); - let (_, cnt) = slot.as_mut().unwrap(); - *cnt -= 1; - if *cnt == 0 { - *slot = None; - } - } -} - -#[derive(Debug, Default, Deserialize, PartialEq)] -#[serde(rename_all = "kebab-case")] -pub struct CargoHttpConfig { - pub proxy: Option, - pub low_speed_limit: Option, - pub timeout: Option, - pub cainfo: Option, - pub check_revoke: Option, - pub user_agent: Option, - pub debug: Option, - pub multiplexing: Option, - pub ssl_version: Option, -} - -#[derive(Debug, Default, Deserialize, PartialEq)] -#[serde(rename_all = "kebab-case")] -pub struct CargoFutureIncompatConfig { - frequency: Option, -} - -#[derive(Debug, Deserialize, PartialEq)] -#[serde(rename_all = "kebab-case")] -pub enum CargoFutureIncompatFrequencyConfig { - Always, - Never, -} - -impl CargoFutureIncompatConfig { - pub fn should_display_message(&self) -> bool { - use CargoFutureIncompatFrequencyConfig::*; - - let frequency = self.frequency.as_ref().unwrap_or(&Always); - match frequency { 
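The credential-saving code above finishes by truncating the file to the new contents and tightening permissions to `0o600` on Unix so the token is not world-readable. A small sketch of just that permission step, using a hypothetical file name:

```rust
use std::fs::File;
use std::io;

// On Unix, restrict the credentials file to the owning user (read/write only).
#[cfg(unix)]
fn restrict_permissions(file: &File) -> io::Result<()> {
    use std::os::unix::fs::PermissionsExt;
    let mut perms = file.metadata()?.permissions();
    perms.set_mode(0o600);
    file.set_permissions(perms)
}

// Other platforms have no direct equivalent, so this is a no-op there.
#[cfg(not(unix))]
fn restrict_permissions(_file: &File) -> io::Result<()> {
    Ok(())
}

fn main() -> io::Result<()> {
    // Hypothetical path used only for the sketch.
    let file = File::create("credentials.toml")?;
    restrict_permissions(&file)?;
    Ok(())
}
```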
- Always => true, - Never => false, - } - } -} - -impl Default for CargoFutureIncompatFrequencyConfig { - fn default() -> Self { - Self::Always - } -} - -/// Configuration for `ssl-version` in `http` section -/// There are two ways to configure: -/// -/// ```text -/// [http] -/// ssl-version = "tlsv1.3" -/// ``` -/// -/// ```text -/// [http] -/// ssl-version.min = "tlsv1.2" -/// ssl-version.max = "tlsv1.3" -/// ``` -#[derive(Clone, Debug, Deserialize, PartialEq)] -#[serde(untagged)] -pub enum SslVersionConfig { - Single(String), - Range(SslVersionConfigRange), -} - -#[derive(Clone, Debug, Deserialize, PartialEq)] -pub struct SslVersionConfigRange { - pub min: Option, - pub max: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct CargoNetConfig { - pub retry: Option, - pub offline: Option, - pub git_fetch_with_cli: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct CargoBuildConfig { - pub pipelining: Option, - pub dep_info_basedir: Option, - pub target_dir: Option, - pub incremental: Option, - pub target: Option, - pub jobs: Option, - pub rustflags: Option, - pub rustdocflags: Option, - pub rustc_wrapper: Option, - pub rustc_workspace_wrapper: Option, - pub rustc: Option, - pub rustdoc: Option, - pub out_dir: Option, -} - -#[derive(Deserialize, Default)] -struct TermConfig { - verbose: Option, - quiet: Option, - color: Option, - #[serde(default)] - #[serde(deserialize_with = "progress_or_string")] - progress: Option, -} - -#[derive(Debug, Default, Deserialize)] -pub struct ProgressConfig { - pub when: ProgressWhen, - pub width: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum ProgressWhen { - Auto, - Never, - Always, -} - -impl Default for ProgressWhen { - fn default() -> ProgressWhen { - ProgressWhen::Auto - } -} - -fn progress_or_string<'de, D>(deserializer: D) -> Result, D::Error> -where - D: serde::de::Deserializer<'de>, -{ - struct ProgressVisitor; - - impl<'de> serde::de::Visitor<'de> for ProgressVisitor { - type Value = Option; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a string (\"auto\" or \"never\") or a table") - } - - fn visit_str(self, s: &str) -> Result - where - E: serde::de::Error, - { - match s { - "auto" => Ok(Some(ProgressConfig { - when: ProgressWhen::Auto, - width: None, - })), - "never" => Ok(Some(ProgressConfig { - when: ProgressWhen::Never, - width: None, - })), - "always" => Err(E::custom("\"always\" progress requires a `width` key")), - _ => Err(E::unknown_variant(s, &["auto", "never"])), - } - } - - fn visit_none(self) -> Result - where - E: serde::de::Error, - { - Ok(None) - } - - fn visit_some(self, deserializer: D) -> Result - where - D: serde::de::Deserializer<'de>, - { - let pc = ProgressConfig::deserialize(deserializer)?; - if let ProgressConfig { - when: ProgressWhen::Always, - width: None, - } = pc - { - return Err(serde::de::Error::custom( - "\"always\" progress requires a `width` key", - )); - } - Ok(Some(pc)) - } - } - - deserializer.deserialize_option(ProgressVisitor) -} - -#[derive(Debug, Deserialize)] -#[serde(untagged)] -enum EnvConfigValueInner { - Simple(String), - WithOptions { - value: String, - #[serde(default)] - force: bool, - #[serde(default)] - relative: bool, - }, -} - -#[derive(Debug, Deserialize)] -#[serde(transparent)] -pub struct EnvConfigValue { - inner: Value, -} - -impl EnvConfigValue { - pub fn is_force(&self) -> bool { - match self.inner.val { - 
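The `ssl-version` key documented above accepts either a single string or a `{ min, max }` table, which is what `#[serde(untagged)]` provides. A standalone sketch of the same shape, assuming the `serde` (with derive) and `toml` crates as dependencies:

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum SslVersionConfig {
    Single(String),
    Range { min: Option<String>, max: Option<String> },
}

#[derive(Debug, Deserialize)]
struct HttpTable {
    #[serde(rename = "ssl-version")]
    ssl_version: SslVersionConfig,
}

fn main() {
    // Both forms from the documentation comment deserialize into the same enum.
    let single: HttpTable = toml::from_str(r#"ssl-version = "tlsv1.3""#).unwrap();
    let range: HttpTable =
        toml::from_str(r#"ssl-version = { min = "tlsv1.2", max = "tlsv1.3" }"#).unwrap();
    println!("{:?}", single.ssl_version);
    println!("{:?}", range.ssl_version);
}
```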
EnvConfigValueInner::Simple(_) => false, - EnvConfigValueInner::WithOptions { force, .. } => force, - } - } - - pub fn resolve<'a>(&'a self, config: &Config) -> Cow<'a, OsStr> { - match self.inner.val { - EnvConfigValueInner::Simple(ref s) => Cow::Borrowed(OsStr::new(s.as_str())), - EnvConfigValueInner::WithOptions { - ref value, - relative, - .. - } => { - if relative { - let p = self.inner.definition.root(config).join(&value); - Cow::Owned(p.into_os_string()) - } else { - Cow::Borrowed(OsStr::new(value.as_str())) - } - } - } - } -} - -pub type EnvConfig = HashMap; - -/// A type to deserialize a list of strings from a toml file. -/// -/// Supports deserializing either a whitespace-separated list of arguments in a -/// single string or a string list itself. For example these deserialize to -/// equivalent values: -/// -/// ```toml -/// a = 'a b c' -/// b = ['a', 'b', 'c'] -/// ``` -#[derive(Debug, Deserialize, Clone)] -pub struct StringList(Vec); - -impl StringList { - pub fn as_slice(&self) -> &[String] { - &self.0 - } -} - -/// StringList automatically merges config values with environment values, -/// this instead follows the precedence rules, so that eg. a string list found -/// in the environment will be used instead of one in a config file. -/// -/// This is currently only used by `PathAndArgs` -#[derive(Debug, Deserialize)] -pub struct UnmergedStringList(Vec); - -#[macro_export] -macro_rules! __shell_print { - ($config:expr, $which:ident, $newline:literal, $($arg:tt)*) => ({ - let mut shell = $config.shell(); - let out = shell.$which(); - drop(out.write_fmt(format_args!($($arg)*))); - if $newline { - drop(out.write_all(b"\n")); - } - }); -} - -#[macro_export] -macro_rules! drop_println { - ($config:expr) => ( $crate::drop_print!($config, "\n") ); - ($config:expr, $($arg:tt)*) => ( - $crate::__shell_print!($config, out, true, $($arg)*) - ); -} - -#[macro_export] -macro_rules! drop_eprintln { - ($config:expr) => ( $crate::drop_eprint!($config, "\n") ); - ($config:expr, $($arg:tt)*) => ( - $crate::__shell_print!($config, err, true, $($arg)*) - ); -} - -#[macro_export] -macro_rules! drop_print { - ($config:expr, $($arg:tt)*) => ( - $crate::__shell_print!($config, out, false, $($arg)*) - ); -} - -#[macro_export] -macro_rules! drop_eprint { - ($config:expr, $($arg:tt)*) => ( - $crate::__shell_print!($config, err, false, $($arg)*) - ); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/path.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/path.rs deleted file mode 100644 index a90cab2b2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/path.rs +++ /dev/null @@ -1,78 +0,0 @@ -use super::{Config, UnmergedStringList, Value}; -use serde::{de::Error, Deserialize}; -use std::path::PathBuf; - -/// Use with the `get` API to fetch a string that will be converted to a -/// `PathBuf`. Relative paths are converted to absolute paths based on the -/// location of the config file. -#[derive(Debug, Deserialize, PartialEq, Clone)] -#[serde(transparent)] -pub struct ConfigRelativePath(Value); - -impl ConfigRelativePath { - /// Returns the underlying value. - pub fn value(&self) -> &Value { - &self.0 - } - - /// Returns the raw underlying configuration value for this key. - pub fn raw_value(&self) -> &str { - &self.0.val - } - - /// Resolves this configuration-relative path to an absolute path. - /// - /// This will always return an absolute path where it's relative to the - /// location for configuration for this value. 
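`StringList` above accepts either `a = 'a b c'` or `b = ['a', 'b', 'c']`; in Cargo the splitting happens inside its private config deserializer. A rough standalone approximation of the same idea, using an untagged enum plus whitespace splitting (not the removed implementation itself):

```rust
use serde::Deserialize;

// Accept either a whitespace-separated string or a real TOML array of strings.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum StringOrList {
    String(String),
    List(Vec<String>),
}

impl StringOrList {
    fn into_vec(self) -> Vec<String> {
        match self {
            StringOrList::String(s) => s.split_whitespace().map(str::to_string).collect(),
            StringOrList::List(v) => v,
        }
    }
}

#[derive(Debug, Deserialize)]
struct Table {
    a: StringOrList,
    b: StringOrList,
}

fn main() {
    let t: Table = toml::from_str("a = 'a b c'\nb = ['a', 'b', 'c']").unwrap();
    // The two spellings are equivalent after normalization.
    assert_eq!(t.a.into_vec(), t.b.into_vec());
}
```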
- pub fn resolve_path(&self, config: &Config) -> PathBuf { - self.0.definition.root(config).join(&self.0.val) - } - - /// Resolves this configuration-relative path to either an absolute path or - /// something appropriate to execute from `PATH`. - /// - /// Values which don't look like a filesystem path (don't contain `/` or - /// `\`) will be returned as-is, and everything else will fall through to an - /// absolute path. - pub fn resolve_program(&self, config: &Config) -> PathBuf { - config.string_to_path(&self.0.val, &self.0.definition) - } -} - -/// A config type that is a program to run. -/// -/// This supports a list of strings like `['/path/to/program', 'somearg']` -/// or a space separated string like `'/path/to/program somearg'`. -/// -/// This expects the first value to be the path to the program to run. -/// Subsequent values are strings of arguments to pass to the program. -/// -/// Typically you should use `ConfigRelativePath::resolve_program` on the path -/// to get the actual program. -#[derive(Debug, Clone)] -pub struct PathAndArgs { - pub path: ConfigRelativePath, - pub args: Vec, -} - -impl<'de> serde::Deserialize<'de> for PathAndArgs { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let vsl = Value::::deserialize(deserializer)?; - let mut strings = vsl.val.0; - if strings.is_empty() { - return Err(D::Error::invalid_length(0, &"at least one element")); - } - let first = strings.remove(0); - let crp = Value { - val: first, - definition: vsl.definition, - }; - Ok(PathAndArgs { - path: ConfigRelativePath(crp), - args: strings, - }) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/target.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/target.rs deleted file mode 100644 index d259b9706..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/target.rs +++ /dev/null @@ -1,211 +0,0 @@ -use super::{Config, ConfigKey, ConfigRelativePath, OptValue, PathAndArgs, StringList, CV}; -use crate::core::compiler::{BuildOutput, LinkType}; -use crate::util::CargoResult; -use serde::Deserialize; -use std::collections::{BTreeMap, HashMap}; -use std::path::PathBuf; - -/// Config definition of a `[target.'cfg(โ€ฆ)']` table. -/// -/// This is a subset of `TargetConfig`. -#[derive(Debug, Deserialize)] -pub struct TargetCfgConfig { - pub runner: OptValue, - pub rustflags: OptValue, - // This is here just to ignore fields from normal `TargetConfig` because - // all `[target]` tables are getting deserialized, whether they start with - // `cfg(` or not. - #[serde(flatten)] - pub other: BTreeMap, -} - -/// Config definition of a `[target]` table or `[host]`. -#[derive(Debug, Clone)] -pub struct TargetConfig { - /// Process to run as a wrapper for `cargo run`, `test`, and `bench` commands. - pub runner: OptValue, - /// Additional rustc flags to pass. - pub rustflags: OptValue, - /// The path of the linker for this target. - pub linker: OptValue, - /// Build script override for the given library name. - /// - /// Any package with a `links` value for the given library name will skip - /// running its build script and instead use the given output from the - /// config file. - pub links_overrides: BTreeMap, -} - -/// Loads all of the `target.'cfg()'` tables. -pub(super) fn load_target_cfgs(config: &Config) -> CargoResult> { - // Load all [target] tables, filter out the cfg() entries. - let mut result = Vec::new(); - // Use a BTreeMap so the keys are sorted. 
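`TargetCfgConfig` above uses `#[serde(flatten)]` to sweep any keys it does not model into a catch-all map so they can be warned about instead of silently ignored. A self-contained sketch of that pattern, assuming `serde` and `toml`:

```rust
use serde::Deserialize;
use std::collections::BTreeMap;

// Known keys are typed; anything else lands in `other` so it can be reported as unused.
#[derive(Debug, Deserialize)]
struct TargetCfg {
    runner: Option<String>,
    rustflags: Option<Vec<String>>,
    #[serde(flatten)]
    other: BTreeMap<String, toml::Value>,
}

fn main() {
    let cfg: TargetCfg = toml::from_str(
        "runner = 'qemu-arm'\nrustflags = ['-C', 'link-arg=-nostartfiles']\nlinkr = 'typo'",
    )
    .unwrap();
    for key in cfg.other.keys() {
        eprintln!("warning: unused key `{}` in [target] config table", key);
    }
    println!("runner = {:?}", cfg.runner);
}
```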
This is important for - // deterministic ordering of rustflags, which affects fingerprinting and - // rebuilds. We may perhaps one day wish to ensure a deterministic - // ordering via the order keys were defined in files perhaps. - let target: BTreeMap = config.get("target")?; - log::debug!("Got all targets {:#?}", target); - for (key, cfg) in target { - if key.starts_with("cfg(") { - // Unfortunately this is not able to display the location of the - // unused key. Using config::Value doesn't work. One - // solution might be to create a special "Any" type, but I think - // that will be quite difficult with the current design. - for other_key in cfg.other.keys() { - config.shell().warn(format!( - "unused key `{}` in [target] config table `{}`", - other_key, key - ))?; - } - result.push((key, cfg)); - } - } - Ok(result) -} - -/// Returns true if the `[target]` table should be applied to host targets. -pub(super) fn get_target_applies_to_host(config: &Config) -> CargoResult { - if config.cli_unstable().target_applies_to_host { - if let Ok(target_applies_to_host) = config.get::("target-applies-to-host") { - Ok(target_applies_to_host) - } else { - Ok(!config.cli_unstable().host_config) - } - } else if config.cli_unstable().host_config { - anyhow::bail!( - "the -Zhost-config flag requires the -Ztarget-applies-to-host flag to be set" - ); - } else { - Ok(true) - } -} - -/// Loads a single `[host]` table for the given triple. -pub(super) fn load_host_triple(config: &Config, triple: &str) -> CargoResult { - if config.cli_unstable().host_config { - let host_triple_prefix = format!("host.{}", triple); - let host_triple_key = ConfigKey::from_str(&host_triple_prefix); - let host_prefix = match config.get_cv(&host_triple_key)? { - Some(_) => host_triple_prefix, - None => "host".to_string(), - }; - load_config_table(config, &host_prefix) - } else { - Ok(TargetConfig { - runner: None, - rustflags: None, - linker: None, - links_overrides: BTreeMap::new(), - }) - } -} - -/// Loads a single `[target]` table for the given triple. -pub(super) fn load_target_triple(config: &Config, triple: &str) -> CargoResult { - load_config_table(config, &format!("target.{}", triple)) -} - -/// Loads a single table for the given prefix. -fn load_config_table(config: &Config, prefix: &str) -> CargoResult { - // This needs to get each field individually because it cannot fetch the - // struct all at once due to `links_overrides`. Can't use `serde(flatten)` - // because it causes serde to use `deserialize_map` which means the config - // deserializer does not know which keys to deserialize, which means - // environment variables would not work. - let runner: OptValue = config.get(&format!("{}.runner", prefix))?; - let rustflags: OptValue = config.get(&format!("{}.rustflags", prefix))?; - let linker: OptValue = config.get(&format!("{}.linker", prefix))?; - // Links do not support environment variables. - let target_key = ConfigKey::from_str(prefix); - let links_overrides = match config.get_table(&target_key)? { - Some(links) => parse_links_overrides(&target_key, links.val)?, - None => BTreeMap::new(), - }; - Ok(TargetConfig { - runner, - rustflags, - linker, - links_overrides, - }) -} - -fn parse_links_overrides( - target_key: &ConfigKey, - links: HashMap, -) -> CargoResult> { - let mut links_overrides = BTreeMap::new(); - for (lib_name, value) in links { - // Skip these keys, it shares the namespace with `TargetConfig`. - match lib_name.as_str() { - // `ar` is a historical thing. 
- "ar" | "linker" | "runner" | "rustflags" => continue, - _ => {} - } - let mut output = BuildOutput::default(); - let table = value.table(&format!("{}.{}", target_key, lib_name))?.0; - // We require deterministic order of evaluation, so we must sort the pairs by key first. - let mut pairs = Vec::new(); - for (k, value) in table { - pairs.push((k, value)); - } - pairs.sort_by_key(|p| p.0); - for (key, value) in pairs { - match key.as_str() { - "rustc-flags" => { - let flags = value.string(key)?; - let whence = format!("target config `{}.{}` (in {})", target_key, key, flags.1); - let (paths, links) = BuildOutput::parse_rustc_flags(flags.0, &whence)?; - output.library_paths.extend(paths); - output.library_links.extend(links); - } - "rustc-link-lib" => { - let list = value.list(key)?; - output - .library_links - .extend(list.iter().map(|v| v.0.clone())); - } - "rustc-link-search" => { - let list = value.list(key)?; - output - .library_paths - .extend(list.iter().map(|v| PathBuf::from(&v.0))); - } - "rustc-link-arg-cdylib" | "rustc-cdylib-link-arg" => { - let args = value.list(key)?; - let args = args.iter().map(|v| (LinkType::Cdylib, v.0.clone())); - output.linker_args.extend(args); - } - "rustc-link-arg-bins" => { - let args = value.list(key)?; - let args = args.iter().map(|v| (LinkType::Bin, v.0.clone())); - output.linker_args.extend(args); - } - "rustc-link-arg" => { - let args = value.list(key)?; - let args = args.iter().map(|v| (LinkType::All, v.0.clone())); - output.linker_args.extend(args); - } - "rustc-cfg" => { - let list = value.list(key)?; - output.cfgs.extend(list.iter().map(|v| v.0.clone())); - } - "rustc-env" => { - for (name, val) in value.table(key)?.0 { - let val = val.string(name)?.0; - output.env.push((name.clone(), val.to_string())); - } - } - "warning" | "rerun-if-changed" | "rerun-if-env-changed" => { - anyhow::bail!("`{}` is not supported in build script overrides", key); - } - _ => { - let val = value.string(key)?.0; - output.metadata.push((key.clone(), val.to_string())); - } - } - } - links_overrides.insert(lib_name, output); - } - Ok(links_overrides) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/value.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/value.rs deleted file mode 100644 index 65b0bffe4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/config/value.rs +++ /dev/null @@ -1,225 +0,0 @@ -//! Deserialization of a `Value` type which tracks where it was deserialized -//! from. -//! -//! Often Cargo wants to report semantic error information or other sorts of -//! error information about configuration keys but it also may wish to indicate -//! as an error context where the key was defined as well (to help user -//! debugging). The `Value` type here can be used to deserialize a `T` value -//! from configuration, but also record where it was deserialized from when it -//! was read. - -use crate::util::config::Config; -use serde::de; -use std::fmt; -use std::marker; -use std::mem; -use std::path::{Path, PathBuf}; - -/// A type which can be deserialized as a configuration value which records -/// where it was deserialized from. -#[derive(Debug, PartialEq, Clone)] -pub struct Value { - /// The inner value that was deserialized. - pub val: T, - /// The location where `val` was defined in configuration (e.g. file it was - /// defined in, env var etc). 
- pub definition: Definition, -} - -pub type OptValue = Option>; - -// Deserializing `Value` is pretty special, and serde doesn't have built-in -// support for this operation. To implement this we extend serde's "data model" -// a bit. We configure deserialization of `Value` to basically only work with -// our one deserializer using configuration. -// -// We define that `Value` deserialization asks the deserializer for a very -// special struct name and struct field names. In doing so the deserializer will -// recognize this and synthesize a magical value for the `definition` field when -// we deserialize it. This protocol is how we're able to have a channel of -// information flowing from the configuration deserializer into the -// deserialization implementation here. -// -// You'll want to also check out the implementation of `ValueDeserializer` in -// `de.rs`. Also note that the names below are intended to be invalid Rust -// identifiers to avoid how they might conflict with other valid structures. -// Finally the `definition` field is transmitted as a tuple of i32/string, which -// is effectively a tagged union of `Definition` itself. - -pub(crate) const VALUE_FIELD: &str = "$__cargo_private_value"; -pub(crate) const DEFINITION_FIELD: &str = "$__cargo_private_definition"; -pub(crate) const NAME: &str = "$__cargo_private_Value"; -pub(crate) static FIELDS: [&str; 2] = [VALUE_FIELD, DEFINITION_FIELD]; - -/// Location where a config value is defined. -#[derive(Clone, Debug, Eq)] -pub enum Definition { - /// Defined in a `.cargo/config`, includes the path to the file. - Path(PathBuf), - /// Defined in an environment variable, includes the environment key. - Environment(String), - /// Passed in on the command line. - Cli, -} - -impl Definition { - /// Root directory where this is defined. - /// - /// If from a file, it is the directory above `.cargo/config`. - /// CLI and env are the current working directory. - pub fn root<'a>(&'a self, config: &'a Config) -> &'a Path { - match self { - Definition::Path(p) => p.parent().unwrap().parent().unwrap(), - Definition::Environment(_) | Definition::Cli => config.cwd(), - } - } - - /// Returns true if self is a higher priority to other. - /// - /// CLI is preferred over environment, which is preferred over files. - pub fn is_higher_priority(&self, other: &Definition) -> bool { - matches!( - (self, other), - (Definition::Cli, Definition::Environment(_)) - | (Definition::Cli, Definition::Path(_)) - | (Definition::Environment(_), Definition::Path(_)) - ) - } -} - -impl PartialEq for Definition { - fn eq(&self, other: &Definition) -> bool { - // configuration values are equivalent no matter where they're defined, - // but they need to be defined in the same location. For example if - // they're defined in the environment that's different than being - // defined in a file due to path interpretations. 
- mem::discriminant(self) == mem::discriminant(other) - } -} - -impl fmt::Display for Definition { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Definition::Path(p) => p.display().fmt(f), - Definition::Environment(key) => write!(f, "environment variable `{}`", key), - Definition::Cli => write!(f, "--config cli option"), - } - } -} - -impl<'de, T> de::Deserialize<'de> for Value -where - T: de::Deserialize<'de>, -{ - fn deserialize(deserializer: D) -> Result, D::Error> - where - D: de::Deserializer<'de>, - { - struct ValueVisitor { - _marker: marker::PhantomData, - } - - impl<'de, T> de::Visitor<'de> for ValueVisitor - where - T: de::Deserialize<'de>, - { - type Value = Value; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a value") - } - - fn visit_map(self, mut visitor: V) -> Result, V::Error> - where - V: de::MapAccess<'de>, - { - let value = visitor.next_key::()?; - if value.is_none() { - return Err(de::Error::custom("value not found")); - } - let val: T = visitor.next_value()?; - - let definition = visitor.next_key::()?; - if definition.is_none() { - return Err(de::Error::custom("definition not found")); - } - let definition: Definition = visitor.next_value()?; - Ok(Value { val, definition }) - } - } - - deserializer.deserialize_struct( - NAME, - &FIELDS, - ValueVisitor { - _marker: marker::PhantomData, - }, - ) - } -} - -struct FieldVisitor { - expected: &'static str, -} - -impl<'de> de::Visitor<'de> for FieldVisitor { - type Value = (); - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a valid value field") - } - - fn visit_str(self, s: &str) -> Result<(), E> - where - E: de::Error, - { - if s == self.expected { - Ok(()) - } else { - Err(de::Error::custom("expected field with custom name")) - } - } -} - -struct ValueKey; - -impl<'de> de::Deserialize<'de> for ValueKey { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - deserializer.deserialize_identifier(FieldVisitor { - expected: VALUE_FIELD, - })?; - Ok(ValueKey) - } -} - -struct DefinitionKey; - -impl<'de> de::Deserialize<'de> for DefinitionKey { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - deserializer.deserialize_identifier(FieldVisitor { - expected: DEFINITION_FIELD, - })?; - Ok(DefinitionKey) - } -} - -impl<'de> de::Deserialize<'de> for Definition { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - let (discr, value) = <(u32, String)>::deserialize(deserializer)?; - match discr { - 0 => Ok(Definition::Path(value.into())), - 1 => Ok(Definition::Environment(value)), - 2 => Ok(Definition::Cli), - _ => panic!("unexpected discriminant {} value {}", discr, value), - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/counter.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/counter.rs deleted file mode 100644 index 82e5addae..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/counter.rs +++ /dev/null @@ -1,67 +0,0 @@ -use std::time::Instant; - -/// A metrics counter storing only latest `N` records. -pub struct MetricsCounter { - /// Slots to store metrics. - slots: [(usize, Instant); N], - /// The slot of the oldest record. - /// Also the next slot to store the new record. - index: usize, -} - -impl MetricsCounter { - /// Creates a new counter with an initial value. 
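As the comments above note, the definition is transmitted as a (discriminant, payload) pair, effectively a hand-rolled tagged union. A small sketch of decoding such a pair with a manual `Deserialize` impl, using `serde_json` as a stand-in for the private config deserializer (an assumption for the sketch; the unknown-discriminant case is handled leniently here rather than panicking):

```rust
use serde::Deserialize;
use std::path::PathBuf;

#[derive(Debug)]
enum Definition {
    Path(PathBuf),
    Environment(String),
    Cli,
}

impl<'de> Deserialize<'de> for Definition {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // Decode the (discriminant, payload) tuple and map it back to a variant.
        let (discr, value) = <(u32, String)>::deserialize(deserializer)?;
        Ok(match discr {
            0 => Definition::Path(value.into()),
            1 => Definition::Environment(value),
            _ => Definition::Cli,
        })
    }
}

fn main() {
    let def: Definition = serde_json::from_str(r#"[1, "CARGO_HOME"]"#).unwrap();
    println!("{:?}", def);
}
```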
- pub fn new(init: usize, init_at: Instant) -> Self { - assert!(N > 0, "number of slots must be greater than zero"); - Self { - slots: [(init, init_at); N], - index: 0, - } - } - - /// Adds record to the counter. - pub fn add(&mut self, data: usize, added_at: Instant) { - self.slots[self.index] = (data, added_at); - self.index = (self.index + 1) % N; - } - - /// Calculates per-second average rate of all slots. - pub fn rate(&self) -> f32 { - let latest = self.slots[self.index.checked_sub(1).unwrap_or(N - 1)]; - let oldest = self.slots[self.index]; - let duration = (latest.1 - oldest.1).as_secs_f32(); - let avg = (latest.0 - oldest.0) as f32 / duration; - if f32::is_nan(avg) { - 0f32 - } else { - avg - } - } -} - -#[cfg(test)] -mod tests { - use super::MetricsCounter; - use std::time::{Duration, Instant}; - - #[test] - fn counter() { - let now = Instant::now(); - let mut counter = MetricsCounter::<3>::new(0, now); - assert_eq!(counter.rate(), 0f32); - counter.add(1, now + Duration::from_secs(1)); - assert_eq!(counter.rate(), 1f32); - counter.add(4, now + Duration::from_secs(2)); - assert_eq!(counter.rate(), 2f32); - counter.add(7, now + Duration::from_secs(3)); - assert_eq!(counter.rate(), 3f32); - counter.add(12, now + Duration::from_secs(4)); - assert_eq!(counter.rate(), 4f32); - } - - #[test] - #[should_panic(expected = "number of slots must be greater than zero")] - fn counter_zero_slot() { - let _counter = MetricsCounter::<0>::new(0, Instant::now()); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/cpu.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/cpu.rs deleted file mode 100644 index 3fe50d372..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/cpu.rs +++ /dev/null @@ -1,243 +0,0 @@ -use std::io; - -pub struct State(imp::State); - -impl State { - /// Captures the current state of all CPUs on the system. - /// - /// The `State` returned here isn't too meaningful in terms of - /// interpretation across platforms, but it can be compared to previous - /// states to get a meaningful cross-platform number. - pub fn current() -> io::Result { - imp::current().map(State) - } - - /// Returns the percentage of time CPUs were idle from the current state - /// relative to the previous state, as a percentage from 0.0 to 100.0. - /// - /// This function will return, as a percentage, the amount of time that the - /// entire system was idle between the `previous` state and this own state. - /// This can be useful to compare two snapshots in time of CPU usage to see - /// how the CPU usage compares between the two. - pub fn idle_since(&self, previous: &State) -> f64 { - imp::pct_idle(&previous.0, &self.0) - } -} - -#[cfg(target_os = "linux")] -mod imp { - use std::{fs, io}; - - pub struct State { - user: u64, - nice: u64, - system: u64, - idle: u64, - iowait: u64, - irq: u64, - softirq: u64, - steal: u64, - guest: u64, - guest_nice: u64, - } - - pub fn current() -> io::Result { - let state = fs::read_to_string("/proc/stat")?; - - (|| { - let mut parts = state.lines().next()?.split_whitespace(); - if parts.next()? 
!= "cpu" { - return None; - } - Some(State { - user: parts.next()?.parse::().ok()?, - nice: parts.next()?.parse::().ok()?, - system: parts.next()?.parse::().ok()?, - idle: parts.next()?.parse::().ok()?, - iowait: parts.next()?.parse::().ok()?, - irq: parts.next()?.parse::().ok()?, - softirq: parts.next()?.parse::().ok()?, - steal: parts.next()?.parse::().ok()?, - guest: parts.next()?.parse::().ok()?, - guest_nice: parts.next()?.parse::().ok()?, - }) - })() - .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "first line of /proc/stat malformed")) - } - - pub fn pct_idle(prev: &State, next: &State) -> f64 { - let user = next.user - prev.user; - let nice = next.nice - prev.nice; - let system = next.system - prev.system; - let idle = next.idle - prev.idle; - let iowait = next.iowait.saturating_sub(prev.iowait); - let irq = next.irq - prev.irq; - let softirq = next.softirq - prev.softirq; - let steal = next.steal - prev.steal; - let guest = next.guest - prev.guest; - let guest_nice = next.guest_nice - prev.guest_nice; - let total = - user + nice + system + idle + iowait + irq + softirq + steal + guest + guest_nice; - - (idle as f64) / (total as f64) * 100.0 - } -} - -#[cfg(target_os = "macos")] -#[allow(bad_style)] -mod imp { - use std::io; - use std::ptr; - - type host_t = u32; - type mach_port_t = u32; - type vm_map_t = mach_port_t; - type vm_offset_t = usize; - type vm_size_t = usize; - type vm_address_t = vm_offset_t; - type processor_flavor_t = i32; - type natural_t = u32; - type processor_info_array_t = *mut i32; - type mach_msg_type_number_t = i32; - type kern_return_t = i32; - - const PROESSOR_CPU_LOAD_INFO: processor_flavor_t = 2; - const CPU_STATE_USER: usize = 0; - const CPU_STATE_SYSTEM: usize = 1; - const CPU_STATE_IDLE: usize = 2; - const CPU_STATE_NICE: usize = 3; - const CPU_STATE_MAX: usize = 4; - - extern "C" { - static mut mach_task_self_: mach_port_t; - - fn mach_host_self() -> mach_port_t; - fn host_processor_info( - host: host_t, - flavor: processor_flavor_t, - out_processor_count: *mut natural_t, - out_processor_info: *mut processor_info_array_t, - out_processor_infoCnt: *mut mach_msg_type_number_t, - ) -> kern_return_t; - fn vm_deallocate( - target_task: vm_map_t, - address: vm_address_t, - size: vm_size_t, - ) -> kern_return_t; - } - - pub struct State { - user: u64, - system: u64, - idle: u64, - nice: u64, - } - - #[repr(C)] - struct processor_cpu_load_info_data_t { - cpu_ticks: [u32; CPU_STATE_MAX], - } - - pub fn current() -> io::Result { - // There's scant little documentation on `host_processor_info` - // throughout the internet, so this is just modeled after what everyone - // else is doing. For now this is modeled largely after libuv. 
- - unsafe { - let mut num_cpus_u = 0; - let mut cpu_info = ptr::null_mut(); - let mut msg_type = 0; - let err = host_processor_info( - mach_host_self(), - PROESSOR_CPU_LOAD_INFO, - &mut num_cpus_u, - &mut cpu_info, - &mut msg_type, - ); - if err != 0 { - return Err(io::Error::last_os_error()); - } - let mut ret = State { - user: 0, - system: 0, - idle: 0, - nice: 0, - }; - let mut current = cpu_info as *const processor_cpu_load_info_data_t; - for _ in 0..num_cpus_u { - ret.user += (*current).cpu_ticks[CPU_STATE_USER] as u64; - ret.system += (*current).cpu_ticks[CPU_STATE_SYSTEM] as u64; - ret.idle += (*current).cpu_ticks[CPU_STATE_IDLE] as u64; - ret.nice += (*current).cpu_ticks[CPU_STATE_NICE] as u64; - current = current.offset(1); - } - vm_deallocate(mach_task_self_, cpu_info as vm_address_t, msg_type as usize); - Ok(ret) - } - } - - pub fn pct_idle(prev: &State, next: &State) -> f64 { - let user = next.user - prev.user; - let system = next.system - prev.system; - let idle = next.idle - prev.idle; - let nice = next.nice - prev.nice; - let total = user + system + idle + nice; - (idle as f64) / (total as f64) * 100.0 - } -} - -#[cfg(windows)] -mod imp { - use std::io; - use std::mem; - use winapi::shared::minwindef::*; - use winapi::um::processthreadsapi::*; - - pub struct State { - idle: FILETIME, - kernel: FILETIME, - user: FILETIME, - } - - pub fn current() -> io::Result { - unsafe { - let mut ret = mem::zeroed::(); - let r = GetSystemTimes(&mut ret.idle, &mut ret.kernel, &mut ret.user); - if r != 0 { - Ok(ret) - } else { - Err(io::Error::last_os_error()) - } - } - } - - pub fn pct_idle(prev: &State, next: &State) -> f64 { - fn to_u64(a: &FILETIME) -> u64 { - ((a.dwHighDateTime as u64) << 32) | (a.dwLowDateTime as u64) - } - - let idle = to_u64(&next.idle) - to_u64(&prev.idle); - let kernel = to_u64(&next.kernel) - to_u64(&prev.kernel); - let user = to_u64(&next.user) - to_u64(&prev.user); - let total = user + kernel; - (idle as f64) / (total as f64) * 100.0 - } -} - -#[cfg(not(any(target_os = "linux", target_os = "macos", windows)))] -mod imp { - use std::io; - - pub struct State; - - pub fn current() -> io::Result { - Err(io::Error::new( - io::ErrorKind::Other, - "unsupported platform to learn CPU state", - )) - } - - pub fn pct_idle(_prev: &State, _next: &State) -> f64 { - unimplemented!() - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/dependency_queue.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/dependency_queue.rs deleted file mode 100644 index 441676562..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/dependency_queue.rs +++ /dev/null @@ -1,259 +0,0 @@ -//! A graph-like structure used to represent a set of dependencies and in what -//! order they should be built. -//! -//! This structure is used to store the dependency graph and dynamically update -//! it to figure out when a dependency should be built. -//! -//! Dependencies in this queue are represented as a (node, edge) pair. This is -//! used to model nodes which produce multiple outputs at different times but -//! some nodes may only require one of the outputs and can start before the -//! whole node is finished. - -use std::collections::{HashMap, HashSet}; -use std::hash::Hash; - -#[derive(Debug)] -pub struct DependencyQueue { - /// A list of all known keys to build. - /// - /// The value of the hash map is list of dependencies which still need to be - /// built before the package can be built. 
Note that the set is dynamically - /// updated as more dependencies are built. - dep_map: HashMap, V)>, - - /// A reverse mapping of a package to all packages that depend on that - /// package. - /// - /// This map is statically known and does not get updated throughout the - /// lifecycle of the DependencyQueue. - /// - /// This is sort of like a `HashMap<(N, E), HashSet>` map, but more - /// easily indexable with just an `N` - reverse_dep_map: HashMap>>, - - /// The relative priority of this package. Higher values should be scheduled sooner. - priority: HashMap, - - /// An expected cost for building this package. Used to determine priority. - cost: HashMap, -} - -impl Default for DependencyQueue { - fn default() -> DependencyQueue { - DependencyQueue::new() - } -} - -impl DependencyQueue { - /// Creates a new dependency queue with 0 packages. - pub fn new() -> DependencyQueue { - DependencyQueue { - dep_map: HashMap::new(), - reverse_dep_map: HashMap::new(), - priority: HashMap::new(), - cost: HashMap::new(), - } - } -} - -impl DependencyQueue { - /// Adds a new node and its dependencies to this queue. - /// - /// The `key` specified is a new node in the dependency graph, and the node - /// depend on all the dependencies iterated by `dependencies`. Each - /// dependency is a node/edge pair, where edges can be thought of as - /// productions from nodes (aka if it's just `()` it's just waiting for the - /// node to finish). - /// - /// An optional `value` can also be associated with `key` which is reclaimed - /// when the node is ready to go. - /// - /// The cost parameter can be used to hint at the relative cost of building - /// this node. This implementation does not care about the units of this value, so - /// the calling code is free to use whatever they'd like. In general, higher cost - /// nodes are expected to take longer to build. - pub fn queue( - &mut self, - key: N, - value: V, - dependencies: impl IntoIterator, - cost: usize, - ) { - assert!(!self.dep_map.contains_key(&key)); - - let mut my_dependencies = HashSet::new(); - for (dep, edge) in dependencies { - my_dependencies.insert((dep.clone(), edge.clone())); - self.reverse_dep_map - .entry(dep) - .or_insert_with(HashMap::new) - .entry(edge) - .or_insert_with(HashSet::new) - .insert(key.clone()); - } - self.dep_map.insert(key.clone(), (my_dependencies, value)); - self.cost.insert(key, cost); - } - - /// All nodes have been added, calculate some internal metadata and prepare - /// for `dequeue`. - pub fn queue_finished(&mut self) { - let mut out = HashMap::new(); - for key in self.dep_map.keys() { - depth(key, &self.reverse_dep_map, &mut out); - } - self.priority = out - .into_iter() - .map(|(n, set)| { - let total_cost = - self.cost[&n] + set.iter().map(|key| self.cost[key]).sum::(); - (n, total_cost) - }) - .collect(); - - /// Creates a flattened reverse dependency list. For a given key, finds the - /// set of nodes which depend on it, including transitively. This is different - /// from self.reverse_dep_map because self.reverse_dep_map only maps one level - /// of reverse dependencies. 
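The queue described above tracks, for every node, the set of dependencies that still have to finish; a node becomes schedulable once that set is empty, and finishing a node shrinks the sets of its dependents. A stripped-down sketch of that bookkeeping, without the edge pairs, priorities, or costs of the removed type:

```rust
use std::collections::{HashMap, HashSet};

// Minimal ready-set scheduler: key -> dependencies that are still unbuilt.
#[derive(Default)]
struct MiniQueue {
    remaining: HashMap<&'static str, HashSet<&'static str>>,
}

impl MiniQueue {
    fn queue(&mut self, key: &'static str, deps: &[&'static str]) {
        self.remaining.insert(key, deps.iter().copied().collect());
    }

    // A node with no outstanding dependencies may start; remove and return it.
    fn dequeue(&mut self) -> Option<&'static str> {
        let ready = self
            .remaining
            .iter()
            .find(|(_, deps)| deps.is_empty())
            .map(|(key, _)| *key)?;
        self.remaining.remove(ready);
        Some(ready)
    }

    // Mark `node` as finished so its dependents may become ready.
    fn finish(&mut self, node: &'static str) {
        for deps in self.remaining.values_mut() {
            deps.remove(node);
        }
    }
}

fn main() {
    let mut q = MiniQueue::default();
    q.queue("syn", &[]);
    q.queue("serde_derive", &["syn"]);
    q.queue("serde", &["serde_derive"]);

    while let Some(pkg) = q.dequeue() {
        println!("building {}", pkg);
        q.finish(pkg);
    }
}
```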
- fn depth<'a, N: Hash + Eq + Clone, E: Hash + Eq + Clone>( - key: &N, - map: &HashMap>>, - results: &'a mut HashMap>, - ) -> &'a HashSet { - if results.contains_key(key) { - let depth = &results[key]; - assert!(!depth.is_empty(), "cycle in DependencyQueue"); - return depth; - } - results.insert(key.clone(), HashSet::new()); - - let mut set = HashSet::new(); - set.insert(key.clone()); - - for dep in map - .get(key) - .into_iter() - .flat_map(|it| it.values()) - .flatten() - { - set.extend(depth(dep, map, results).iter().cloned()) - } - - let slot = results.get_mut(key).unwrap(); - *slot = set; - &*slot - } - } - - /// Dequeues a package that is ready to be built. - /// - /// A package is ready to be built when it has 0 un-built dependencies. If - /// `None` is returned then no packages are ready to be built. - pub fn dequeue(&mut self) -> Option<(N, V)> { - let next = self - .dep_map - .iter() - .filter(|(_, (deps, _))| deps.is_empty()) - .map(|(key, _)| key.clone()) - .max_by_key(|k| self.priority[k]); - let key = match next { - Some(key) => key, - None => return None, - }; - let (_, data) = self.dep_map.remove(&key).unwrap(); - Some((key, data)) - } - - /// Returns `true` if there are remaining packages to be built. - pub fn is_empty(&self) -> bool { - self.dep_map.is_empty() - } - - /// Returns the number of remaining packages to be built. - pub fn len(&self) -> usize { - self.dep_map.len() - } - - /// Indicate that something has finished. - /// - /// Calling this function indicates that the `node` has produced `edge`. All - /// remaining work items which only depend on this node/edge pair are now - /// candidates to start their job. - /// - /// Returns the nodes that are now allowed to be dequeued as a result of - /// finishing this node. - pub fn finish(&mut self, node: &N, edge: &E) -> Vec<&N> { - // hashset - let reverse_deps = self.reverse_dep_map.get(node).and_then(|map| map.get(edge)); - let reverse_deps = match reverse_deps { - Some(deps) => deps, - None => return Vec::new(), - }; - let key = (node.clone(), edge.clone()); - let mut result = Vec::new(); - for dep in reverse_deps.iter() { - let edges = &mut self.dep_map.get_mut(dep).unwrap().0; - assert!(edges.remove(&key)); - if edges.is_empty() { - result.push(dep); - } - } - result - } -} - -#[cfg(test)] -mod test { - use super::DependencyQueue; - - #[test] - fn deep_first_equal_cost() { - let mut q = DependencyQueue::new(); - - q.queue(1, (), vec![], 1); - q.queue(2, (), vec![(1, ())], 1); - q.queue(3, (), vec![], 1); - q.queue(4, (), vec![(2, ()), (3, ())], 1); - q.queue(5, (), vec![(4, ()), (3, ())], 1); - q.queue_finished(); - - assert_eq!(q.dequeue(), Some((1, ()))); - assert_eq!(q.dequeue(), Some((3, ()))); - assert_eq!(q.dequeue(), None); - q.finish(&3, &()); - assert_eq!(q.dequeue(), None); - q.finish(&1, &()); - assert_eq!(q.dequeue(), Some((2, ()))); - assert_eq!(q.dequeue(), None); - q.finish(&2, &()); - assert_eq!(q.dequeue(), Some((4, ()))); - assert_eq!(q.dequeue(), None); - q.finish(&4, &()); - assert_eq!(q.dequeue(), Some((5, ()))); - } - - #[test] - fn sort_by_highest_cost() { - let mut q = DependencyQueue::new(); - - q.queue(1, (), vec![], 1); - q.queue(2, (), vec![(1, ())], 1); - q.queue(3, (), vec![], 4); - q.queue(4, (), vec![(2, ()), (3, ())], 1); - q.queue_finished(); - - assert_eq!(q.dequeue(), Some((3, ()))); - assert_eq!(q.dequeue(), Some((1, ()))); - assert_eq!(q.dequeue(), None); - q.finish(&3, &()); - assert_eq!(q.dequeue(), None); - q.finish(&1, &()); - assert_eq!(q.dequeue(), Some((2, ()))); - 
assert_eq!(q.dequeue(), None); - q.finish(&2, &()); - assert_eq!(q.dequeue(), Some((4, ()))); - assert_eq!(q.dequeue(), None); - q.finish(&4, &()); - assert_eq!(q.dequeue(), None); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/diagnostic_server.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/diagnostic_server.rs deleted file mode 100644 index 22bbf0ca2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/diagnostic_server.rs +++ /dev/null @@ -1,297 +0,0 @@ -//! A small TCP server to handle collection of diagnostics information in a -//! cross-platform way for the `cargo fix` command. - -use std::collections::HashSet; -use std::env; -use std::io::{BufReader, Read, Write}; -use std::net::{Shutdown, SocketAddr, TcpListener, TcpStream}; -use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::Arc; -use std::thread::{self, JoinHandle}; - -use anyhow::{Context, Error}; -use cargo_util::ProcessBuilder; -use log::warn; -use serde::{Deserialize, Serialize}; - -use crate::core::Edition; -use crate::util::errors::CargoResult; -use crate::util::Config; - -const DIAGNOSICS_SERVER_VAR: &str = "__CARGO_FIX_DIAGNOSTICS_SERVER"; -const PLEASE_REPORT_THIS_BUG: &str = - "This likely indicates a bug in either rustc or cargo itself,\n\ - and we would appreciate a bug report! You're likely to see \n\ - a number of compiler warnings after this message which cargo\n\ - attempted to fix but failed. If you could open an issue at\n\ - https://github.com/rust-lang/rust/issues\n\ - quoting the full output of this command we'd be very appreciative!\n\ - Note that you may be able to make some more progress in the near-term\n\ - fixing code with the `--broken-code` flag\n\n\ - "; - -#[derive(Deserialize, Serialize, Hash, Eq, PartialEq, Clone)] -pub enum Message { - Migrating { - file: String, - from_edition: Edition, - to_edition: Edition, - }, - Fixing { - file: String, - }, - Fixed { - file: String, - fixes: u32, - }, - FixFailed { - files: Vec, - krate: Option, - errors: Vec, - abnormal_exit: Option, - }, - ReplaceFailed { - file: String, - message: String, - }, - EditionAlreadyEnabled { - message: String, - edition: Edition, - }, -} - -impl Message { - pub fn post(&self) -> Result<(), Error> { - let addr = - env::var(DIAGNOSICS_SERVER_VAR).context("diagnostics collector misconfigured")?; - let mut client = - TcpStream::connect(&addr).context("failed to connect to parent diagnostics target")?; - - let s = serde_json::to_string(self).context("failed to serialize message")?; - client - .write_all(s.as_bytes()) - .context("failed to write message to diagnostics target")?; - client - .shutdown(Shutdown::Write) - .context("failed to shutdown")?; - - let mut tmp = Vec::new(); - client - .read_to_end(&mut tmp) - .context("failed to receive a disconnect")?; - - Ok(()) - } -} - -pub struct DiagnosticPrinter<'a> { - config: &'a Config, - dedupe: HashSet, -} - -impl<'a> DiagnosticPrinter<'a> { - pub fn new(config: &'a Config) -> DiagnosticPrinter<'a> { - DiagnosticPrinter { - config, - dedupe: HashSet::new(), - } - } - - pub fn print(&mut self, msg: &Message) -> CargoResult<()> { - match msg { - Message::Migrating { - file, - from_edition, - to_edition, - } => { - if !self.dedupe.insert(msg.clone()) { - return Ok(()); - } - self.config.shell().status( - "Migrating", - &format!("{} from {} edition to {}", file, from_edition, to_edition), - ) - } - Message::Fixing { file } => self - .config - .shell() - .verbose(|shell| shell.status("Fixing", file)), - 
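`Message::post` above serializes a message as JSON, writes it to the TCP address advertised in an environment variable, and half-closes the socket so the server knows the message is complete. A rough standalone sketch of that client side, with a hypothetical message type and assuming `serde` (derive) plus `serde_json`:

```rust
use std::io::{Read, Write};
use std::net::{Shutdown, TcpStream};

// Hypothetical stand-in for the diagnostic server's message enum.
#[derive(serde::Serialize)]
struct Fixed {
    file: String,
    fixes: u32,
}

fn post(addr: &str, msg: &Fixed) -> std::io::Result<()> {
    let mut client = TcpStream::connect(addr)?;
    let body = serde_json::to_string(msg)
        .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
    client.write_all(body.as_bytes())?;
    // Shutting down the write half is the end-of-message marker.
    client.shutdown(Shutdown::Write)?;
    // Wait for the server to close its side so the message is not dropped early.
    let mut ack = Vec::new();
    client.read_to_end(&mut ack)?;
    Ok(())
}

fn main() {
    // The real address would come from an env var set by the parent process;
    // this one is made up for the sketch.
    let msg = Fixed { file: "src/lib.rs".into(), fixes: 2 };
    if let Err(e) = post("127.0.0.1:34567", &msg) {
        eprintln!("failed to post diagnostic: {}", e);
    }
}
```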
Message::Fixed { file, fixes } => { - let msg = if *fixes == 1 { "fix" } else { "fixes" }; - let msg = format!("{} ({} {})", file, fixes, msg); - self.config.shell().status("Fixed", msg) - } - Message::ReplaceFailed { file, message } => { - let msg = format!("error applying suggestions to `{}`\n", file); - self.config.shell().warn(&msg)?; - write!( - self.config.shell().err(), - "The full error message was:\n\n> {}\n\n", - message, - )?; - write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?; - Ok(()) - } - Message::FixFailed { - files, - krate, - errors, - abnormal_exit, - } => { - if let Some(ref krate) = *krate { - self.config.shell().warn(&format!( - "failed to automatically apply fixes suggested by rustc \ - to crate `{}`", - krate, - ))?; - } else { - self.config - .shell() - .warn("failed to automatically apply fixes suggested by rustc")?; - } - if !files.is_empty() { - writeln!( - self.config.shell().err(), - "\nafter fixes were automatically applied the compiler \ - reported errors within these files:\n" - )?; - for file in files { - writeln!(self.config.shell().err(), " * {}", file)?; - } - writeln!(self.config.shell().err())?; - } - write!(self.config.shell().err(), "{}", PLEASE_REPORT_THIS_BUG)?; - if !errors.is_empty() { - writeln!( - self.config.shell().err(), - "The following errors were reported:" - )?; - for error in errors { - write!(self.config.shell().err(), "{}", error)?; - if !error.ends_with('\n') { - writeln!(self.config.shell().err())?; - } - } - } - if let Some(exit) = abnormal_exit { - writeln!( - self.config.shell().err(), - "rustc exited abnormally: {}", - exit - )?; - } - writeln!( - self.config.shell().err(), - "Original diagnostics will follow.\n" - )?; - Ok(()) - } - Message::EditionAlreadyEnabled { message, edition } => { - if !self.dedupe.insert(msg.clone()) { - return Ok(()); - } - // Don't give a really verbose warning if it has already been issued. - if self.dedupe.insert(Message::EditionAlreadyEnabled { - message: "".to_string(), // Dummy, so that this only long-warns once. - edition: *edition, - }) { - self.config.shell().warn(&format!("\ -{} - -If you are trying to migrate from the previous edition ({prev_edition}), the -process requires following these steps: - -1. Start with `edition = \"{prev_edition}\"` in `Cargo.toml` -2. Run `cargo fix --edition` -3. Modify `Cargo.toml` to set `edition = \"{this_edition}\"` -4. 
Run `cargo build` or `cargo test` to verify the fixes worked - -More details may be found at -https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html -", - message, this_edition=edition, prev_edition=edition.previous().unwrap() - )) - } else { - self.config.shell().warn(message) - } - } - } - } -} - -#[derive(Debug)] -pub struct RustfixDiagnosticServer { - listener: TcpListener, - addr: SocketAddr, -} - -pub struct StartedServer { - addr: SocketAddr, - done: Arc, - thread: Option>, -} - -impl RustfixDiagnosticServer { - pub fn new() -> Result { - let listener = TcpListener::bind("127.0.0.1:0") - .with_context(|| "failed to bind TCP listener to manage locking")?; - let addr = listener.local_addr()?; - - Ok(RustfixDiagnosticServer { listener, addr }) - } - - pub fn configure(&self, process: &mut ProcessBuilder) { - process.env(DIAGNOSICS_SERVER_VAR, self.addr.to_string()); - } - - pub fn start(self, on_message: F) -> Result - where - F: Fn(Message) + Send + 'static, - { - let addr = self.addr; - let done = Arc::new(AtomicBool::new(false)); - let done2 = done.clone(); - let thread = thread::spawn(move || { - self.run(&on_message, &done2); - }); - - Ok(StartedServer { - addr, - thread: Some(thread), - done, - }) - } - - fn run(self, on_message: &dyn Fn(Message), done: &AtomicBool) { - while let Ok((client, _)) = self.listener.accept() { - if done.load(Ordering::SeqCst) { - break; - } - let mut client = BufReader::new(client); - let mut s = String::new(); - if let Err(e) = client.read_to_string(&mut s) { - warn!("diagnostic server failed to read: {}", e); - } else { - match serde_json::from_str(&s) { - Ok(message) => on_message(message), - Err(e) => warn!("invalid diagnostics message: {}", e), - } - } - // The client should be kept alive until after `on_message` is - // called to ensure that the client doesn't exit too soon (and - // Message::Finish getting posted before Message::FixDiagnostic). - drop(client); - } - } -} - -impl Drop for StartedServer { - fn drop(&mut self) { - self.done.store(true, Ordering::SeqCst); - // Ignore errors here as this is largely best-effort - if TcpStream::connect(&self.addr).is_err() { - return; - } - drop(self.thread.take().unwrap().join()); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/errors.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/errors.rs deleted file mode 100644 index 8a3716077..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/errors.rs +++ /dev/null @@ -1,308 +0,0 @@ -#![allow(unknown_lints)] - -use crate::core::{TargetKind, Workspace}; -use crate::ops::CompileOptions; -use anyhow::Error; -use cargo_util::ProcessError; -use std::fmt; -use std::path::PathBuf; - -pub type CargoResult = anyhow::Result; - -#[derive(Debug)] -pub struct HttpNot200 { - pub code: u32, - pub url: String, -} - -impl fmt::Display for HttpNot200 { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "failed to get 200 response from `{}`, got {}", - self.url, self.code - ) - } -} - -impl std::error::Error for HttpNot200 {} - -// ============================================================================= -// Verbose error - -/// An error wrapper for errors that should only be displayed with `--verbose`. -/// -/// This should only be used in rare cases. 
When emitting this error, you -/// should have a normal error higher up the error-cause chain (like "could -/// not compile `foo`"), so at least *something* gets printed without -/// `--verbose`. -pub struct VerboseError { - inner: Error, -} - -impl VerboseError { - pub fn new(inner: Error) -> VerboseError { - VerboseError { inner } - } -} - -impl std::error::Error for VerboseError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - self.inner.source() - } -} - -impl fmt::Debug for VerboseError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.inner.fmt(f) - } -} - -impl fmt::Display for VerboseError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.inner.fmt(f) - } -} - -// ============================================================================= -// Internal error - -/// An unexpected, internal error. -/// -/// This should only be used for unexpected errors. It prints a message asking -/// the user to file a bug report. -pub struct InternalError { - inner: Error, -} - -impl InternalError { - pub fn new(inner: Error) -> InternalError { - InternalError { inner } - } -} - -impl std::error::Error for InternalError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - self.inner.source() - } -} - -impl fmt::Debug for InternalError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.inner.fmt(f) - } -} - -impl fmt::Display for InternalError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.inner.fmt(f) - } -} - -// ============================================================================= -// Manifest error - -/// Error wrapper related to a particular manifest and providing it's path. -/// -/// This error adds no displayable info of it's own. -pub struct ManifestError { - cause: Error, - manifest: PathBuf, -} - -impl ManifestError { - pub fn new>(cause: E, manifest: PathBuf) -> Self { - Self { - cause: cause.into(), - manifest, - } - } - - pub fn manifest_path(&self) -> &PathBuf { - &self.manifest - } - - /// Returns an iterator over the `ManifestError` chain of causes. - /// - /// So if this error was not caused by another `ManifestError` this will be empty. - pub fn manifest_causes(&self) -> ManifestCauses<'_> { - ManifestCauses { current: self } - } -} - -impl std::error::Error for ManifestError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - self.cause.source() - } -} - -impl fmt::Debug for ManifestError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.cause.fmt(f) - } -} - -impl fmt::Display for ManifestError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.cause.fmt(f) - } -} - -/// An iterator over the `ManifestError` chain of causes. -pub struct ManifestCauses<'a> { - current: &'a ManifestError, -} - -impl<'a> Iterator for ManifestCauses<'a> { - type Item = &'a ManifestError; - - fn next(&mut self) -> Option { - self.current = self.current.cause.downcast_ref()?; - Some(self.current) - } -} - -impl<'a> ::std::iter::FusedIterator for ManifestCauses<'a> {} - -// ============================================================================= -// Cargo test errors. 
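`ManifestCauses` above walks the chain of causes and yields only the `ManifestError` layers. With `anyhow`, the generic form of that walk is the `chain()` iterator plus `downcast_ref`; a small simplified sketch (not the removed type itself):

```rust
use anyhow::anyhow;

fn main() {
    // Build a nested error chain roughly the way manifest loading might.
    let err: anyhow::Error = anyhow!("missing field `name`")
        .context("failed to parse manifest `crates/foo/Cargo.toml`")
        .context("failed to load workspace");

    // Walk every cause in order, outermost first.
    for (depth, cause) in err.chain().enumerate() {
        eprintln!("{:width$}{}", "", cause, width = depth * 2);
    }

    // Or pick out a specific wrapper type anywhere in the chain.
    let has_io_error = err
        .chain()
        .any(|cause| cause.downcast_ref::<std::io::Error>().is_some());
    println!("caused by an I/O error: {}", has_io_error);
}
```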
- -/// Error when testcases fail -#[derive(Debug)] -pub struct CargoTestError { - pub test: Test, - pub desc: String, - pub code: Option, - pub causes: Vec, -} - -impl fmt::Display for CargoTestError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.desc.fmt(f) - } -} - -impl std::error::Error for CargoTestError {} - -#[derive(Debug)] -pub enum Test { - Multiple, - Doc, - UnitTest { - kind: TargetKind, - name: String, - pkg_name: String, - }, -} - -impl CargoTestError { - pub fn new(test: Test, errors: Vec) -> Self { - if errors.is_empty() { - panic!("Cannot create CargoTestError from empty Vec") - } - let desc = errors - .iter() - .map(|error| error.desc.clone()) - .collect::>() - .join("\n"); - CargoTestError { - test, - desc, - code: errors[0].code, - causes: errors, - } - } - - pub fn hint(&self, ws: &Workspace<'_>, opts: &CompileOptions) -> String { - match self.test { - Test::UnitTest { - ref kind, - ref name, - ref pkg_name, - } => { - let pkg_info = if opts.spec.needs_spec_flag(ws) { - format!("-p {} ", pkg_name) - } else { - String::new() - }; - - match *kind { - TargetKind::Bench => { - format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name) - } - TargetKind::Bin => { - format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name) - } - TargetKind::Lib(_) => format!("test failed, to rerun pass '{}--lib'", pkg_info), - TargetKind::Test => { - format!("test failed, to rerun pass '{}--test {}'", pkg_info, name) - } - TargetKind::ExampleBin | TargetKind::ExampleLib(_) => { - format!("test failed, to rerun pass '{}--example {}", pkg_info, name) - } - _ => "test failed.".into(), - } - } - Test::Doc => "test failed, to rerun pass '--doc'".into(), - _ => "test failed.".into(), - } - } -} - -// ============================================================================= -// CLI errors - -pub type CliResult = Result<(), CliError>; - -#[derive(Debug)] -/// The CLI error is the error type used at Cargo's CLI-layer. -/// -/// All errors from the lib side of Cargo will get wrapped with this error. -/// Other errors (such as command-line argument validation) will create this -/// directly. -pub struct CliError { - /// The error to display. This can be `None` in rare cases to exit with a - /// code without displaying a message. For example `cargo run -q` where - /// the resulting process exits with a nonzero code (on Windows), or an - /// external subcommand that exits nonzero (we assume it printed its own - /// message). - pub error: Option, - /// The process exit code. 
- pub exit_code: i32, -} - -impl CliError { - pub fn new(error: anyhow::Error, code: i32) -> CliError { - CliError { - error: Some(error), - exit_code: code, - } - } - - pub fn code(code: i32) -> CliError { - CliError { - error: None, - exit_code: code, - } - } -} - -impl From for CliError { - fn from(err: anyhow::Error) -> CliError { - CliError::new(err, 101) - } -} - -impl From for CliError { - fn from(err: clap::Error) -> CliError { - let code = if err.use_stderr() { 1 } else { 0 }; - CliError::new(err.into(), code) - } -} - -// ============================================================================= -// Construction helpers - -pub fn internal(error: S) -> anyhow::Error { - InternalError::new(anyhow::format_err!("{}", error)).into() -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/flock.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/flock.rs deleted file mode 100644 index 755bcdcd9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/flock.rs +++ /dev/null @@ -1,464 +0,0 @@ -use std::fs::{File, OpenOptions}; -use std::io; -use std::io::{Read, Seek, SeekFrom, Write}; -use std::path::{Display, Path, PathBuf}; - -use crate::util::errors::CargoResult; -use crate::util::Config; -use anyhow::Context as _; -use cargo_util::paths; -use sys::*; -use termcolor::Color::Cyan; - -#[derive(Debug)] -pub struct FileLock { - f: Option, - path: PathBuf, - state: State, -} - -#[derive(PartialEq, Debug)] -enum State { - Unlocked, - Shared, - Exclusive, -} - -impl FileLock { - /// Returns the underlying file handle of this lock. - pub fn file(&self) -> &File { - self.f.as_ref().unwrap() - } - - /// Returns the underlying path that this lock points to. - /// - /// Note that special care must be taken to ensure that the path is not - /// referenced outside the lifetime of this lock. - pub fn path(&self) -> &Path { - assert_ne!(self.state, State::Unlocked); - &self.path - } - - /// Returns the parent path containing this file - pub fn parent(&self) -> &Path { - assert_ne!(self.state, State::Unlocked); - self.path.parent().unwrap() - } - - /// Removes all sibling files to this locked file. - /// - /// This can be useful if a directory is locked with a sentinel file but it - /// needs to be cleared out as it may be corrupt. - pub fn remove_siblings(&self) -> CargoResult<()> { - let path = self.path(); - for entry in path.parent().unwrap().read_dir()? { - let entry = entry?; - if Some(&entry.file_name()[..]) == path.file_name() { - continue; - } - let kind = entry.file_type()?; - if kind.is_dir() { - paths::remove_dir_all(entry.path())?; - } else { - paths::remove_file(entry.path())?; - } - } - Ok(()) - } -} - -impl Read for FileLock { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - self.file().read(buf) - } -} - -impl Seek for FileLock { - fn seek(&mut self, to: SeekFrom) -> io::Result { - self.file().seek(to) - } -} - -impl Write for FileLock { - fn write(&mut self, buf: &[u8]) -> io::Result { - self.file().write(buf) - } - - fn flush(&mut self) -> io::Result<()> { - self.file().flush() - } -} - -impl Drop for FileLock { - fn drop(&mut self) { - if self.state != State::Unlocked { - if let Some(f) = self.f.take() { - let _ = unlock(&f); - } - } - } -} - -/// A "filesystem" is intended to be a globally shared, hence locked, resource -/// in Cargo. 
-/// -/// The `Path` of a filesystem cannot be learned unless it's done in a locked -/// fashion, and otherwise functions on this structure are prepared to handle -/// concurrent invocations across multiple instances of Cargo. -#[derive(Clone, Debug)] -pub struct Filesystem { - root: PathBuf, -} - -impl Filesystem { - /// Creates a new filesystem to be rooted at the given path. - pub fn new(path: PathBuf) -> Filesystem { - Filesystem { root: path } - } - - /// Like `Path::join`, creates a new filesystem rooted at this filesystem - /// joined with the given path. - pub fn join>(&self, other: T) -> Filesystem { - Filesystem::new(self.root.join(other)) - } - - /// Like `Path::push`, pushes a new path component onto this filesystem. - pub fn push>(&mut self, other: T) { - self.root.push(other); - } - - /// Consumes this filesystem and returns the underlying `PathBuf`. - /// - /// Note that this is a relatively dangerous operation and should be used - /// with great caution!. - pub fn into_path_unlocked(self) -> PathBuf { - self.root - } - - /// Returns the underlying `Path`. - /// - /// Note that this is a relatively dangerous operation and should be used - /// with great caution!. - pub fn as_path_unlocked(&self) -> &Path { - &self.root - } - - /// Creates the directory pointed to by this filesystem. - /// - /// Handles errors where other Cargo processes are also attempting to - /// concurrently create this directory. - pub fn create_dir(&self) -> CargoResult<()> { - paths::create_dir_all(&self.root) - } - - /// Returns an adaptor that can be used to print the path of this - /// filesystem. - pub fn display(&self) -> Display<'_> { - self.root.display() - } - - /// Opens exclusive access to a file, returning the locked version of a - /// file. - /// - /// This function will create a file at `path` if it doesn't already exist - /// (including intermediate directories), and then it will acquire an - /// exclusive lock on `path`. If the process must block waiting for the - /// lock, the `msg` is printed to `config`. - /// - /// The returned file can be accessed to look at the path and also has - /// read/write access to the underlying file. - pub fn open_rw
<P>
(&self, path: P, config: &Config, msg: &str) -> CargoResult - where - P: AsRef, - { - self.open( - path.as_ref(), - OpenOptions::new().read(true).write(true).create(true), - State::Exclusive, - config, - msg, - ) - } - - /// Opens shared access to a file, returning the locked version of a file. - /// - /// This function will fail if `path` doesn't already exist, but if it does - /// then it will acquire a shared lock on `path`. If the process must block - /// waiting for the lock, the `msg` is printed to `config`. - /// - /// The returned file can be accessed to look at the path and also has read - /// access to the underlying file. Any writes to the file will return an - /// error. - pub fn open_ro
<P>
(&self, path: P, config: &Config, msg: &str) -> CargoResult - where - P: AsRef, - { - self.open( - path.as_ref(), - OpenOptions::new().read(true), - State::Shared, - config, - msg, - ) - } - - fn open( - &self, - path: &Path, - opts: &OpenOptions, - state: State, - config: &Config, - msg: &str, - ) -> CargoResult { - let path = self.root.join(path); - - // If we want an exclusive lock then if we fail because of NotFound it's - // likely because an intermediate directory didn't exist, so try to - // create the directory and then continue. - let f = opts - .open(&path) - .or_else(|e| { - if e.kind() == io::ErrorKind::NotFound && state == State::Exclusive { - paths::create_dir_all(path.parent().unwrap())?; - Ok(opts.open(&path)?) - } else { - Err(anyhow::Error::from(e)) - } - }) - .with_context(|| format!("failed to open: {}", path.display()))?; - match state { - State::Exclusive => { - acquire(config, msg, &path, &|| try_lock_exclusive(&f), &|| { - lock_exclusive(&f) - })?; - } - State::Shared => { - acquire(config, msg, &path, &|| try_lock_shared(&f), &|| { - lock_shared(&f) - })?; - } - State::Unlocked => {} - } - Ok(FileLock { - f: Some(f), - path, - state, - }) - } -} - -impl PartialEq for Filesystem { - fn eq(&self, other: &Path) -> bool { - self.root == other - } -} - -impl PartialEq for Path { - fn eq(&self, other: &Filesystem) -> bool { - self == other.root - } -} - -/// Acquires a lock on a file in a "nice" manner. -/// -/// Almost all long-running blocking actions in Cargo have a status message -/// associated with them as we're not sure how long they'll take. Whenever a -/// conflicted file lock happens, this is the case (we're not sure when the lock -/// will be released). -/// -/// This function will acquire the lock on a `path`, printing out a nice message -/// to the console if we have to wait for it. It will first attempt to use `try` -/// to acquire a lock on the crate, and in the case of contention it will emit a -/// status message based on `msg` to `config`'s shell, and then use `block` to -/// block waiting to acquire a lock. -/// -/// Returns an error if the lock could not be acquired or if any error other -/// than a contention error happens. -fn acquire( - config: &Config, - msg: &str, - path: &Path, - lock_try: &dyn Fn() -> io::Result<()>, - lock_block: &dyn Fn() -> io::Result<()>, -) -> CargoResult<()> { - // File locking on Unix is currently implemented via `flock`, which is known - // to be broken on NFS. We could in theory just ignore errors that happen on - // NFS, but apparently the failure mode [1] for `flock` on NFS is **blocking - // forever**, even if the "non-blocking" flag is passed! - // - // As a result, we just skip all file locks entirely on NFS mounts. That - // should avoid calling any `flock` functions at all, and it wouldn't work - // there anyway. - // - // [1]: https://github.com/rust-lang/cargo/issues/2615 - if is_on_nfs_mount(path) { - return Ok(()); - } - - match lock_try() { - Ok(()) => return Ok(()), - - // In addition to ignoring NFS which is commonly not working we also - // just ignore locking on filesystems that look like they don't - // implement file locking. 
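        // (Concretely, `error_unsupported` below maps ENOTSUP/EOPNOTSUPP --
        // plus ENOSYS on Linux -- to "this filesystem has no locking", and the
        // Windows implementation does the same for ERROR_INVALID_FUNCTION.)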
- Err(e) if error_unsupported(&e) => return Ok(()), - - Err(e) => { - if !error_contended(&e) { - let e = anyhow::Error::from(e); - let cx = format!("failed to lock file: {}", path.display()); - return Err(e.context(cx)); - } - } - } - let msg = format!("waiting for file lock on {}", msg); - config.shell().status_with_color("Blocking", &msg, Cyan)?; - - lock_block().with_context(|| format!("failed to lock file: {}", path.display()))?; - return Ok(()); - - #[cfg(all(target_os = "linux", not(target_env = "musl")))] - fn is_on_nfs_mount(path: &Path) -> bool { - use std::ffi::CString; - use std::mem; - use std::os::unix::prelude::*; - - let path = match CString::new(path.as_os_str().as_bytes()) { - Ok(path) => path, - Err(_) => return false, - }; - - unsafe { - let mut buf: libc::statfs = mem::zeroed(); - let r = libc::statfs(path.as_ptr(), &mut buf); - - r == 0 && buf.f_type as u32 == libc::NFS_SUPER_MAGIC as u32 - } - } - - #[cfg(any(not(target_os = "linux"), target_env = "musl"))] - fn is_on_nfs_mount(_path: &Path) -> bool { - false - } -} - -#[cfg(unix)] -mod sys { - use std::fs::File; - use std::io::{Error, Result}; - use std::os::unix::io::AsRawFd; - - pub(super) fn lock_shared(file: &File) -> Result<()> { - flock(file, libc::LOCK_SH) - } - - pub(super) fn lock_exclusive(file: &File) -> Result<()> { - flock(file, libc::LOCK_EX) - } - - pub(super) fn try_lock_shared(file: &File) -> Result<()> { - flock(file, libc::LOCK_SH | libc::LOCK_NB) - } - - pub(super) fn try_lock_exclusive(file: &File) -> Result<()> { - flock(file, libc::LOCK_EX | libc::LOCK_NB) - } - - pub(super) fn unlock(file: &File) -> Result<()> { - flock(file, libc::LOCK_UN) - } - - pub(super) fn error_contended(err: &Error) -> bool { - err.raw_os_error().map_or(false, |x| x == libc::EWOULDBLOCK) - } - - pub(super) fn error_unsupported(err: &Error) -> bool { - match err.raw_os_error() { - // Unfortunately, depending on the target, these may or may not be the same. - // For targets in which they are the same, the duplicate pattern causes a warning. 
- #[allow(unreachable_patterns)] - Some(libc::ENOTSUP | libc::EOPNOTSUPP) => true, - #[cfg(target_os = "linux")] - Some(libc::ENOSYS) => true, - _ => false, - } - } - - #[cfg(not(target_os = "solaris"))] - fn flock(file: &File, flag: libc::c_int) -> Result<()> { - let ret = unsafe { libc::flock(file.as_raw_fd(), flag) }; - if ret < 0 { - Err(Error::last_os_error()) - } else { - Ok(()) - } - } - - #[cfg(target_os = "solaris")] - fn flock(file: &File, flag: libc::c_int) -> Result<()> { - // Solaris lacks flock(), so simply succeed with a no-op - Ok(()) - } -} - -#[cfg(windows)] -mod sys { - use std::fs::File; - use std::io::{Error, Result}; - use std::mem; - use std::os::windows::io::AsRawHandle; - - use winapi::shared::minwindef::DWORD; - use winapi::shared::winerror::{ERROR_INVALID_FUNCTION, ERROR_LOCK_VIOLATION}; - use winapi::um::fileapi::{LockFileEx, UnlockFile}; - use winapi::um::minwinbase::{LOCKFILE_EXCLUSIVE_LOCK, LOCKFILE_FAIL_IMMEDIATELY}; - - pub(super) fn lock_shared(file: &File) -> Result<()> { - lock_file(file, 0) - } - - pub(super) fn lock_exclusive(file: &File) -> Result<()> { - lock_file(file, LOCKFILE_EXCLUSIVE_LOCK) - } - - pub(super) fn try_lock_shared(file: &File) -> Result<()> { - lock_file(file, LOCKFILE_FAIL_IMMEDIATELY) - } - - pub(super) fn try_lock_exclusive(file: &File) -> Result<()> { - lock_file(file, LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY) - } - - pub(super) fn error_contended(err: &Error) -> bool { - err.raw_os_error() - .map_or(false, |x| x == ERROR_LOCK_VIOLATION as i32) - } - - pub(super) fn error_unsupported(err: &Error) -> bool { - err.raw_os_error() - .map_or(false, |x| x == ERROR_INVALID_FUNCTION as i32) - } - - pub(super) fn unlock(file: &File) -> Result<()> { - unsafe { - let ret = UnlockFile(file.as_raw_handle(), 0, 0, !0, !0); - if ret == 0 { - Err(Error::last_os_error()) - } else { - Ok(()) - } - } - } - - fn lock_file(file: &File, flags: DWORD) -> Result<()> { - unsafe { - let mut overlapped = mem::zeroed(); - let ret = LockFileEx(file.as_raw_handle(), flags, 0, !0, !0, &mut overlapped); - if ret == 0 { - Err(Error::last_os_error()) - } else { - Ok(()) - } - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/graph.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/graph.rs deleted file mode 100644 index ff4018201..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/graph.rs +++ /dev/null @@ -1,178 +0,0 @@ -use std::borrow::Borrow; -use std::collections::BTreeSet; -use std::fmt; - -pub struct Graph { - nodes: im_rc::OrdMap>, -} - -impl Graph { - pub fn new() -> Graph { - Graph { - nodes: im_rc::OrdMap::new(), - } - } - - pub fn add(&mut self, node: N) { - self.nodes.entry(node).or_insert_with(im_rc::OrdMap::new); - } - - pub fn link(&mut self, node: N, child: N) -> &mut E { - self.nodes - .entry(node) - .or_insert_with(im_rc::OrdMap::new) - .entry(child) - .or_insert_with(Default::default) - } - - pub fn contains(&self, k: &Q) -> bool - where - N: Borrow, - Q: Ord + Eq, - { - self.nodes.contains_key(k) - } - - pub fn edge(&self, from: &N, to: &N) -> Option<&E> { - self.nodes.get(from)?.get(to) - } - - pub fn edges(&self, from: &N) -> impl Iterator { - self.nodes.get(from).into_iter().flat_map(|x| x.iter()) - } - - /// A topological sort of the `Graph` - pub fn sort(&self) -> Vec { - let mut ret = Vec::new(); - let mut marks = BTreeSet::new(); - - for node in self.nodes.keys() { - self.sort_inner_visit(node, &mut ret, &mut marks); - } - - ret - } - - fn sort_inner_visit(&self, 
node: &N, dst: &mut Vec, marks: &mut BTreeSet) { - if !marks.insert(node.clone()) { - return; - } - - for child in self.nodes[node].keys() { - self.sort_inner_visit(child, dst, marks); - } - - dst.push(node.clone()); - } - - pub fn iter(&self) -> impl Iterator { - self.nodes.keys() - } - - /// Checks if there is a path from `from` to `to`. - pub fn is_path_from_to<'a>(&'a self, from: &'a N, to: &'a N) -> bool { - let mut stack = vec![from]; - let mut seen = BTreeSet::new(); - seen.insert(from); - while let Some(iter) = stack.pop().and_then(|p| self.nodes.get(p)) { - for p in iter.keys() { - if p == to { - return true; - } - if seen.insert(p) { - stack.push(p); - } - } - } - false - } - - /// Resolves one of the paths from the given dependent package down to - /// a leaf. - /// - /// Each element contains a node along with an edge except the first one. - /// The representation would look like: - /// - /// (Node0,) -> (Node1, Edge01) -> (Node2, Edge12)... - pub fn path_to_bottom<'a>(&'a self, mut pkg: &'a N) -> Vec<(&'a N, Option<&'a E>)> { - let mut result = vec![(pkg, None)]; - while let Some(p) = self.nodes.get(pkg).and_then(|p| { - p.iter() - // Note that we can have "cycles" introduced through dev-dependency - // edges, so make sure we don't loop infinitely. - .find(|&(node, _)| result.iter().all(|p| p.0 != node)) - .map(|(node, edge)| (node, Some(edge))) - }) { - result.push(p); - pkg = p.0; - } - result - } - - /// Resolves one of the paths from the given dependent package up to - /// the root. - /// - /// Each element contains a node along with an edge except the first one. - /// The representation would look like: - /// - /// (Node0,) -> (Node1, Edge01) -> (Node2, Edge12)... - pub fn path_to_top<'a>(&'a self, mut pkg: &'a N) -> Vec<(&'a N, Option<&'a E>)> { - // Note that this implementation isn't the most robust per se, we'll - // likely have to tweak this over time. For now though it works for what - // it's used for! - let mut result = vec![(pkg, None)]; - let first_pkg_depending_on = |pkg, res: &[(&N, Option<&E>)]| { - self.nodes - .iter() - .filter(|(_, adjacent)| adjacent.contains_key(pkg)) - // Note that we can have "cycles" introduced through dev-dependency - // edges, so make sure we don't loop infinitely. - .find(|&(node, _)| !res.iter().any(|p| p.0 == node)) - .map(|(p, adjacent)| (p, adjacent.get(pkg))) - }; - while let Some(p) = first_pkg_depending_on(pkg, &result) { - result.push(p); - pkg = p.0; - } - result - } -} - -impl Default for Graph { - fn default() -> Graph { - Graph::new() - } -} - -impl fmt::Debug for Graph { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - writeln!(fmt, "Graph {{")?; - - for (n, e) in &self.nodes { - writeln!(fmt, " - {}", n)?; - - for n in e.keys() { - writeln!(fmt, " - {}", n)?; - } - } - - write!(fmt, "}}")?; - - Ok(()) - } -} - -impl PartialEq for Graph { - fn eq(&self, other: &Graph) -> bool { - self.nodes.eq(&other.nodes) - } -} -impl Eq for Graph {} - -impl Clone for Graph { - fn clone(&self) -> Graph { - Graph { - nodes: self.nodes.clone(), - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/hasher.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/hasher.rs deleted file mode 100644 index 01e15ae2c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/hasher.rs +++ /dev/null @@ -1,24 +0,0 @@ -//! Implementation of a hasher that produces the same values across releases. -//! -//! The hasher should be fast and have a low chance of collisions (but is not -//! 
sufficient for cryptographic purposes). -#![allow(deprecated)] - -use std::hash::{Hasher, SipHasher}; - -pub struct StableHasher(SipHasher); - -impl StableHasher { - pub fn new() -> StableHasher { - StableHasher(SipHasher::new()) - } -} - -impl Hasher for StableHasher { - fn finish(&self) -> u64 { - self.0.finish() - } - fn write(&mut self, bytes: &[u8]) { - self.0.write(bytes) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/hex.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/hex.rs deleted file mode 100644 index 2d06d9b59..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/hex.rs +++ /dev/null @@ -1,31 +0,0 @@ -use super::StableHasher; -use std::fs::File; -use std::hash::{Hash, Hasher}; -use std::io::Read; - -pub fn to_hex(num: u64) -> String { - hex::encode(num.to_le_bytes()) -} - -pub fn hash_u64(hashable: H) -> u64 { - let mut hasher = StableHasher::new(); - hashable.hash(&mut hasher); - hasher.finish() -} - -pub fn hash_u64_file(mut file: &File) -> std::io::Result { - let mut hasher = StableHasher::new(); - let mut buf = [0; 64 * 1024]; - loop { - let n = file.read(&mut buf)?; - if n == 0 { - break; - } - hasher.write(&buf[..n]); - } - Ok(hasher.finish()) -} - -pub fn short_hash(hashable: &H) -> String { - to_hex(hash_u64(hashable)) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/important_paths.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/important_paths.rs deleted file mode 100644 index 224c4ab8b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/important_paths.rs +++ /dev/null @@ -1,45 +0,0 @@ -use crate::util::errors::CargoResult; -use cargo_util::paths; -use std::path::{Path, PathBuf}; - -/// Finds the root `Cargo.toml`. -pub fn find_root_manifest_for_wd(cwd: &Path) -> CargoResult { - let valid_cargo_toml_file_name = "Cargo.toml"; - let invalid_cargo_toml_file_name = "cargo.toml"; - let mut invalid_cargo_toml_path_exists = false; - - for current in paths::ancestors(cwd, None) { - let manifest = current.join(valid_cargo_toml_file_name); - if manifest.exists() { - return Ok(manifest); - } - if current.join(invalid_cargo_toml_file_name).exists() { - invalid_cargo_toml_path_exists = true; - } - } - - if invalid_cargo_toml_path_exists { - anyhow::bail!( - "could not find `{}` in `{}` or any parent directory, but found cargo.toml please try to rename it to Cargo.toml", - valid_cargo_toml_file_name, - cwd.display() - ) - } else { - anyhow::bail!( - "could not find `{}` in `{}` or any parent directory", - valid_cargo_toml_file_name, - cwd.display() - ) - } -} - -/// Returns the path to the `file` in `pwd`, if it exists. 
-pub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult { - let manifest = pwd.join(file); - - if manifest.exists() { - Ok(manifest) - } else { - anyhow::bail!("Could not find `{}` in `{}`", file, pwd.display()) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/interning.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/interning.rs deleted file mode 100644 index bbec12942..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/interning.rs +++ /dev/null @@ -1,182 +0,0 @@ -use serde::{Serialize, Serializer}; -use std::borrow::Borrow; -use std::cmp::Ordering; -use std::collections::HashSet; -use std::ffi::OsStr; -use std::fmt; -use std::hash::{Hash, Hasher}; -use std::ops::Deref; -use std::path::Path; -use std::ptr; -use std::str; -use std::sync::Mutex; - -fn leak(s: String) -> &'static str { - Box::leak(s.into_boxed_str()) -} - -lazy_static::lazy_static! { - static ref STRING_CACHE: Mutex> = Mutex::new(HashSet::new()); -} - -#[derive(Clone, Copy)] -pub struct InternedString { - inner: &'static str, -} - -impl<'a> From<&'a str> for InternedString { - fn from(item: &'a str) -> Self { - InternedString::new(item) - } -} - -impl<'a> From<&'a String> for InternedString { - fn from(item: &'a String) -> Self { - InternedString::new(item) - } -} - -impl From for InternedString { - fn from(item: String) -> Self { - InternedString::new(&item) - } -} - -impl PartialEq for InternedString { - fn eq(&self, other: &InternedString) -> bool { - ptr::eq(self.as_str(), other.as_str()) - } -} - -impl PartialEq for InternedString { - fn eq(&self, other: &str) -> bool { - *self == other - } -} - -impl<'a> PartialEq<&'a str> for InternedString { - fn eq(&self, other: &&str) -> bool { - **self == **other - } -} - -impl Eq for InternedString {} - -impl InternedString { - pub fn new(str: &str) -> InternedString { - let mut cache = STRING_CACHE.lock().unwrap(); - let s = cache.get(str).cloned().unwrap_or_else(|| { - let s = leak(str.to_string()); - cache.insert(s); - s - }); - - InternedString { inner: s } - } - - pub fn as_str(&self) -> &'static str { - self.inner - } -} - -impl Deref for InternedString { - type Target = str; - - fn deref(&self) -> &'static str { - self.as_str() - } -} - -impl AsRef for InternedString { - fn as_ref(&self) -> &str { - self.as_str() - } -} - -impl AsRef for InternedString { - fn as_ref(&self) -> &OsStr { - self.as_str().as_ref() - } -} - -impl AsRef for InternedString { - fn as_ref(&self) -> &Path { - self.as_str().as_ref() - } -} - -impl Hash for InternedString { - // N.B., we can't implement this as `identity(self).hash(state)`, - // because we use this for on-disk fingerprints and so need - // stability across Cargo invocations. - fn hash(&self, state: &mut H) { - self.as_str().hash(state); - } -} - -impl Borrow for InternedString { - // If we implement Hash as `identity(self).hash(state)`, - // then this will need to be removed. 
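    // (This `Borrow<str>` impl is sound because the `Hash` impl above hashes
    // the string contents, so `InternedString` and `str` hash and compare
    // identically, as the `Borrow` contract requires.)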
- fn borrow(&self) -> &str { - self.as_str() - } -} - -impl fmt::Debug for InternedString { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(self.as_str(), f) - } -} - -impl fmt::Display for InternedString { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(self.as_str(), f) - } -} - -impl Ord for InternedString { - fn cmp(&self, other: &InternedString) -> Ordering { - self.as_str().cmp(other.as_str()) - } -} - -impl PartialOrd for InternedString { - fn partial_cmp(&self, other: &InternedString) -> Option { - Some(self.cmp(other)) - } -} - -impl Serialize for InternedString { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(self.inner) - } -} - -struct InternedStringVisitor; - -impl<'de> serde::Deserialize<'de> for InternedString { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - deserializer.deserialize_str(InternedStringVisitor) - } -} - -impl<'de> serde::de::Visitor<'de> for InternedStringVisitor { - type Value = InternedString; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("an String like thing") - } - - fn visit_str(self, v: &str) -> Result - where - E: serde::de::Error, - { - Ok(InternedString::new(v)) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/into_url.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/into_url.rs deleted file mode 100644 index 26f365ee8..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/into_url.rs +++ /dev/null @@ -1,30 +0,0 @@ -use std::path::{Path, PathBuf}; - -use url::Url; - -use crate::util::CargoResult; - -/// A type that can be converted to a Url -pub trait IntoUrl { - /// Performs the conversion - fn into_url(self) -> CargoResult; -} - -impl<'a> IntoUrl for &'a str { - fn into_url(self) -> CargoResult { - Url::parse(self).map_err(|s| anyhow::format_err!("invalid url `{}`: {}", self, s)) - } -} - -impl<'a> IntoUrl for &'a Path { - fn into_url(self) -> CargoResult { - Url::from_file_path(self) - .map_err(|()| anyhow::format_err!("invalid path url `{}`", self.display())) - } -} - -impl<'a> IntoUrl for &'a PathBuf { - fn into_url(self) -> CargoResult { - self.as_path().into_url() - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/into_url_with_base.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/into_url_with_base.rs deleted file mode 100644 index 63037bdf6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/into_url_with_base.rs +++ /dev/null @@ -1,50 +0,0 @@ -use crate::util::{CargoResult, IntoUrl}; - -use url::Url; - -/// A type that can be interpreted as a relative Url and converted to -/// a Url. 
-pub trait IntoUrlWithBase { - /// Performs the conversion - fn into_url_with_base(self, base: Option) -> CargoResult; -} - -impl<'a> IntoUrlWithBase for &'a str { - fn into_url_with_base(self, base: Option) -> CargoResult { - let base_url = match base { - Some(base) => Some( - base.into_url() - .map_err(|s| anyhow::format_err!("invalid url `{}`: {}", self, s))?, - ), - None => None, - }; - - Url::options() - .base_url(base_url.as_ref()) - .parse(self) - .map_err(|s| anyhow::format_err!("invalid url `{}`: {}", self, s)) - } -} - -#[cfg(test)] -mod tests { - use crate::util::IntoUrlWithBase; - - #[test] - fn into_url_with_base() { - assert_eq!( - "rel/path" - .into_url_with_base(Some("file:///abs/path/")) - .unwrap() - .to_string(), - "file:///abs/path/rel/path" - ); - assert_eq!( - "rel/path" - .into_url_with_base(Some("file:///abs/path/popped-file")) - .unwrap() - .to_string(), - "file:///abs/path/rel/path" - ); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/job.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/job.rs deleted file mode 100644 index 7a9cd1bca..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/job.rs +++ /dev/null @@ -1,141 +0,0 @@ -//! Job management (mostly for windows) -//! -//! Most of the time when you're running cargo you expect Ctrl-C to actually -//! terminate the entire tree of processes in play, not just the one at the top -//! (cargo). This currently works "by default" on Unix platforms because Ctrl-C -//! actually sends a signal to the *process group* rather than the parent -//! process, so everything will get torn down. On Windows, however, this does -//! not happen and Ctrl-C just kills cargo. -//! -//! To achieve the same semantics on Windows we use Job Objects to ensure that -//! all processes die at the same time. Job objects have a mode of operation -//! where when all handles to the object are closed it causes all child -//! processes associated with the object to be terminated immediately. -//! Conveniently whenever a process in the job object spawns a new process the -//! child will be associated with the job object as well. This means if we add -//! ourselves to the job object we create then everything will get torn down! - -pub use self::imp::Setup; - -pub fn setup() -> Option { - unsafe { imp::setup() } -} - -#[cfg(unix)] -mod imp { - use std::env; - - pub type Setup = (); - - pub unsafe fn setup() -> Option<()> { - // There's a test case for the behavior of - // when-cargo-is-killed-subprocesses-are-also-killed, but that requires - // one cargo spawned to become its own session leader, so we do that - // here. - if env::var("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE").is_ok() { - libc::setsid(); - } - Some(()) - } -} - -#[cfg(windows)] -mod imp { - use std::io; - use std::mem; - use std::ptr; - - use log::info; - - use winapi::shared::minwindef::*; - use winapi::um::handleapi::*; - use winapi::um::jobapi2::*; - use winapi::um::processthreadsapi::*; - use winapi::um::winnt::HANDLE; - use winapi::um::winnt::*; - - pub struct Setup { - job: Handle, - } - - pub struct Handle { - inner: HANDLE, - } - - fn last_err() -> io::Error { - io::Error::last_os_error() - } - - pub unsafe fn setup() -> Option { - // Creates a new job object for us to use and then adds ourselves to it. - // Note that all errors are basically ignored in this function, - // intentionally. Job objects are "relatively new" in Windows, - // particularly the ability to support nested job objects. 
Older - // Windows installs don't support this ability. We probably don't want - // to force Cargo to abort in this situation or force others to *not* - // use job objects, so we instead just ignore errors and assume that - // we're otherwise part of someone else's job object in this case. - - let job = CreateJobObjectW(ptr::null_mut(), ptr::null()); - if job.is_null() { - return None; - } - let job = Handle { inner: job }; - - // Indicate that when all handles to the job object are gone that all - // process in the object should be killed. Note that this includes our - // entire process tree by default because we've added ourselves and - // our children will reside in the job once we spawn a process. - let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION; - info = mem::zeroed(); - info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; - let r = SetInformationJobObject( - job.inner, - JobObjectExtendedLimitInformation, - &mut info as *mut _ as LPVOID, - mem::size_of_val(&info) as DWORD, - ); - if r == 0 { - return None; - } - - // Assign our process to this job object, meaning that our children will - // now live or die based on our existence. - let me = GetCurrentProcess(); - let r = AssignProcessToJobObject(job.inner, me); - if r == 0 { - return None; - } - - Some(Setup { job }) - } - - impl Drop for Setup { - fn drop(&mut self) { - // On normal exits (not ctrl-c), we don't want to kill any child - // processes. The destructor here configures our job object to - // **not** kill everything on close, then closes the job object. - unsafe { - let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION; - info = mem::zeroed(); - let r = SetInformationJobObject( - self.job.inner, - JobObjectExtendedLimitInformation, - &mut info as *mut _ as LPVOID, - mem::size_of_val(&info) as DWORD, - ); - if r == 0 { - info!("failed to configure job object to defaults: {}", last_err()); - } - } - } - } - - impl Drop for Handle { - fn drop(&mut self) { - unsafe { - CloseHandle(self.inner); - } - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/lev_distance.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/lev_distance.rs deleted file mode 100644 index 8dcef4a89..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/lev_distance.rs +++ /dev/null @@ -1,93 +0,0 @@ -use std::cmp; - -pub fn lev_distance(me: &str, t: &str) -> usize { - // Comparing the strings lowercased will result in a difference in capitalization being less distance away - // than being a completely different letter. Otherwise `CHECK` is as far away from `check` as it - // is from `build` (both with a distance of 5). For a single letter shortcut (e.g. `b` or `c`), they will - // all be as far away from any capital single letter entry (all with a distance of 1). - // By first lowercasing the strings, `C` and `c` are closer than `C` and `b`, for example. 
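    // (Worked example: after lowercasing, lev_distance("CHECK", "check") == 0
    // while lev_distance("CHECK", "build") == 5, so a candidate that differs
    // only in capitalization is treated as an exact match.)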
- let me = me.to_lowercase(); - let t = t.to_lowercase(); - - let t_len = t.chars().count(); - if me.is_empty() { - return t_len; - } - if t.is_empty() { - return me.chars().count(); - } - - let mut dcol = (0..=t_len).collect::>(); - let mut t_last = 0; - - for (i, sc) in me.chars().enumerate() { - let mut current = i; - dcol[0] = current + 1; - - for (j, tc) in t.chars().enumerate() { - let next = dcol[j + 1]; - - if sc == tc { - dcol[j + 1] = current; - } else { - dcol[j + 1] = cmp::min(current, next); - dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1; - } - - current = next; - t_last = j; - } - } - - dcol[t_last + 1] -} - -/// Find the closest element from `iter` matching `choice`. The `key` callback -/// is used to select a `&str` from the iterator to compare against `choice`. -pub fn closest<'a, T>( - choice: &str, - iter: impl Iterator, - key: impl Fn(&T) -> &'a str, -) -> Option { - // Only consider candidates with a lev_distance of 3 or less so we don't - // suggest out-of-the-blue options. - iter.map(|e| (lev_distance(choice, key(&e)), e)) - .filter(|&(d, _)| d < 4) - .min_by_key(|t| t.0) - .map(|t| t.1) -} - -/// Version of `closest` that returns a common "suggestion" that can be tacked -/// onto the end of an error message. -pub fn closest_msg<'a, T>( - choice: &str, - iter: impl Iterator, - key: impl Fn(&T) -> &'a str, -) -> String { - match closest(choice, iter, &key) { - Some(e) => format!("\n\n\tDid you mean `{}`?", key(&e)), - None => String::new(), - } -} - -#[test] -fn test_lev_distance() { - use std::char::{from_u32, MAX}; - // Test bytelength agnosticity - for c in (0u32..MAX as u32) - .filter_map(from_u32) - .map(|i| i.to_string()) - { - assert_eq!(lev_distance(&c, &c), 0); - } - - let a = "\nMรคry hรคd รค little lรคmb\n\nLittle lรคmb\n"; - let b = "\nMary hรคd รค little lรคmb\n\nLittle lรคmb\n"; - let c = "Mary hรคd รค little lรคmb\n\nLittle lรคmb\n"; - assert_eq!(lev_distance(a, b), 1); - assert_eq!(lev_distance(b, a), 1); - assert_eq!(lev_distance(a, c), 2); - assert_eq!(lev_distance(c, a), 2); - assert_eq!(lev_distance(b, c), 1); - assert_eq!(lev_distance(c, b), 1); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/lockserver.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/lockserver.rs deleted file mode 100644 index 14911556a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/lockserver.rs +++ /dev/null @@ -1,171 +0,0 @@ -//! An implementation of IPC locks, guaranteed to be released if a process dies -//! -//! This module implements a locking server/client where the main `cargo fix` -//! process will start up a server and then all the client processes will -//! connect to it. The main purpose of this file is to ensure that each crate -//! (aka file entry point) is only fixed by one process at a time, currently -//! concurrent fixes can't happen. -//! -//! The basic design here is to use a TCP server which is pretty portable across -//! platforms. For simplicity it just uses threads as well. Clients connect to -//! the main server, inform the server what its name is, and then wait for the -//! server to give it the lock (aka write a byte). 
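// Illustrative sketch (editorial addition, not part of cargo-0.60.0): one way
// the types below fit together. The parent process starts the server and hands
// its address to child processes; each child blocks in `LockServerClient::lock`
// until the server writes the "you have the lock" byte, and releases the lock
// by dropping the client (which closes the socket). The function name and the
// "crate-name" key are hypothetical.
fn lockserver_roundtrip() -> Result<(), anyhow::Error> {
    let server = LockServer::new()?;
    let addr = *server.addr();       // copy the address before `start` consumes the server
    let _running = server.start()?;  // accepts clients on a background thread
    let _guard = LockServerClient::lock(&addr, "crate-name")?;
    // ... work that must not run concurrently for "crate-name" ...
    Ok(())                           // dropping `_guard` releases the lock
}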
- -use std::collections::HashMap; -use std::io::{BufRead, BufReader, Read, Write}; -use std::net::{SocketAddr, TcpListener, TcpStream}; -use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::{Arc, Mutex}; -use std::thread::{self, JoinHandle}; - -use anyhow::{Context, Error}; - -pub struct LockServer { - listener: TcpListener, - addr: SocketAddr, - threads: HashMap, - done: Arc, -} - -pub struct LockServerStarted { - done: Arc, - addr: SocketAddr, - thread: Option>, -} - -pub struct LockServerClient { - _socket: TcpStream, -} - -struct ServerClient { - thread: Option>, - lock: Arc)>>, -} - -impl LockServer { - pub fn new() -> Result { - let listener = TcpListener::bind("127.0.0.1:0") - .with_context(|| "failed to bind TCP listener to manage locking")?; - let addr = listener.local_addr()?; - Ok(LockServer { - listener, - addr, - threads: HashMap::new(), - done: Arc::new(AtomicBool::new(false)), - }) - } - - pub fn addr(&self) -> &SocketAddr { - &self.addr - } - - pub fn start(self) -> Result { - let addr = self.addr; - let done = self.done.clone(); - let thread = thread::spawn(|| { - self.run(); - }); - Ok(LockServerStarted { - addr, - thread: Some(thread), - done, - }) - } - - fn run(mut self) { - while let Ok((client, _)) = self.listener.accept() { - if self.done.load(Ordering::SeqCst) { - break; - } - - // Learn the name of our connected client to figure out if it needs - // to wait for another process to release the lock. - let mut client = BufReader::new(client); - let mut name = String::new(); - if client.read_line(&mut name).is_err() { - continue; - } - let client = client.into_inner(); - - // If this "named mutex" is already registered and the thread is - // still going, put it on the queue. Otherwise wait on the previous - // thread and we'll replace it just below. - if let Some(t) = self.threads.get_mut(&name) { - let mut state = t.lock.lock().unwrap(); - if state.0 { - state.1.push(client); - continue; - } - drop(t.thread.take().unwrap().join()); - } - - let lock = Arc::new(Mutex::new((true, vec![client]))); - let lock2 = lock.clone(); - let thread = thread::spawn(move || { - loop { - let mut client = { - let mut state = lock2.lock().unwrap(); - if state.1.is_empty() { - state.0 = false; - break; - } else { - state.1.remove(0) - } - }; - // Inform this client that it now has the lock and wait for - // it to disconnect by waiting for EOF. 
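                    // (Hitting EOF here means the client dropped its
                    // `LockServerClient`, i.e. its socket closed and its
                    // critical section is over, so the loop can hand the lock
                    // to the next queued client.)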
- if client.write_all(&[1]).is_err() { - continue; - } - let mut dst = Vec::new(); - drop(client.read_to_end(&mut dst)); - } - }); - - self.threads.insert( - name, - ServerClient { - thread: Some(thread), - lock, - }, - ); - } - } -} - -impl Drop for LockServer { - fn drop(&mut self) { - for (_, mut client) in self.threads.drain() { - if let Some(thread) = client.thread.take() { - drop(thread.join()); - } - } - } -} - -impl Drop for LockServerStarted { - fn drop(&mut self) { - self.done.store(true, Ordering::SeqCst); - // Ignore errors here as this is largely best-effort - if TcpStream::connect(&self.addr).is_err() { - return; - } - drop(self.thread.take().unwrap().join()); - } -} - -impl LockServerClient { - pub fn lock(addr: &SocketAddr, name: impl AsRef<[u8]>) -> Result { - let mut client = - TcpStream::connect(&addr).with_context(|| "failed to connect to parent lock server")?; - client - .write_all(name.as_ref()) - .and_then(|_| client.write_all(b"\n")) - .with_context(|| "failed to write to lock server")?; - let mut buf = [0]; - client - .read_exact(&mut buf) - .with_context(|| "failed to acquire lock")?; - Ok(LockServerClient { _socket: client }) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/machine_message.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/machine_message.rs deleted file mode 100644 index baef5167b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/machine_message.rs +++ /dev/null @@ -1,105 +0,0 @@ -use std::path::{Path, PathBuf}; - -use serde::ser; -use serde::Serialize; -use serde_json::{self, json, value::RawValue}; - -use crate::core::{compiler::CompileMode, PackageId, Target}; - -pub trait Message: ser::Serialize { - fn reason(&self) -> &str; - - fn to_json_string(&self) -> String { - let json = serde_json::to_string(self).unwrap(); - assert!(json.starts_with("{\"")); - let reason = json!(self.reason()); - format!("{{\"reason\":{},{}", reason, &json[1..]) - } -} - -#[derive(Serialize)] -pub struct FromCompiler<'a> { - pub package_id: PackageId, - pub manifest_path: &'a Path, - pub target: &'a Target, - pub message: Box, -} - -impl<'a> Message for FromCompiler<'a> { - fn reason(&self) -> &str { - "compiler-message" - } -} - -#[derive(Serialize)] -pub struct Artifact<'a> { - pub package_id: PackageId, - pub manifest_path: PathBuf, - pub target: &'a Target, - pub profile: ArtifactProfile, - pub features: Vec, - pub filenames: Vec, - pub executable: Option, - pub fresh: bool, -} - -impl<'a> Message for Artifact<'a> { - fn reason(&self) -> &str { - "compiler-artifact" - } -} - -/// This is different from the regular `Profile` to maintain backwards -/// compatibility (in particular, `test` is no longer in `Profile`, but we -/// still want it to be included here). 
-#[derive(Serialize)] -pub struct ArtifactProfile { - pub opt_level: &'static str, - pub debuginfo: Option, - pub debug_assertions: bool, - pub overflow_checks: bool, - pub test: bool, -} - -#[derive(Serialize)] -pub struct BuildScript<'a> { - pub package_id: PackageId, - pub linked_libs: &'a [String], - pub linked_paths: &'a [String], - pub cfgs: &'a [String], - pub env: &'a [(String, String)], - pub out_dir: &'a Path, -} - -impl<'a> Message for BuildScript<'a> { - fn reason(&self) -> &str { - "build-script-executed" - } -} - -#[derive(Serialize)] -pub struct TimingInfo<'a> { - pub package_id: PackageId, - pub target: &'a Target, - pub mode: CompileMode, - pub duration: f64, - #[serde(skip_serializing_if = "Option::is_none")] - pub rmeta_time: Option, -} - -impl<'a> Message for TimingInfo<'a> { - fn reason(&self) -> &str { - "timing-info" - } -} - -#[derive(Serialize)] -pub struct BuildFinished { - pub success: bool, -} - -impl Message for BuildFinished { - fn reason(&self) -> &str { - "build-finished" - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/mod.rs deleted file mode 100644 index 4b8604f92..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/mod.rs +++ /dev/null @@ -1,109 +0,0 @@ -use std::fmt; -use std::time::Duration; - -pub use self::canonical_url::CanonicalUrl; -pub use self::config::{homedir, Config, ConfigValue}; -pub(crate) use self::counter::MetricsCounter; -pub use self::dependency_queue::DependencyQueue; -pub use self::diagnostic_server::RustfixDiagnosticServer; -pub use self::errors::{internal, CargoResult, CliResult, Test}; -pub use self::errors::{CargoTestError, CliError}; -pub use self::flock::{FileLock, Filesystem}; -pub use self::graph::Graph; -pub use self::hasher::StableHasher; -pub use self::hex::{hash_u64, short_hash, to_hex}; -pub use self::into_url::IntoUrl; -pub use self::into_url_with_base::IntoUrlWithBase; -pub use self::lev_distance::{closest, closest_msg, lev_distance}; -pub use self::lockserver::{LockServer, LockServerClient, LockServerStarted}; -pub use self::progress::{Progress, ProgressStyle}; -pub use self::queue::Queue; -pub use self::restricted_names::validate_package_name; -pub use self::rustc::Rustc; -pub use self::semver_ext::{OptVersionReq, VersionExt, VersionReqExt}; -pub use self::to_semver::ToSemver; -pub use self::vcs::{existing_vcs_repo, FossilRepo, GitRepo, HgRepo, PijulRepo}; -pub use self::workspace::{ - add_path_args, path_args, print_available_benches, print_available_binaries, - print_available_examples, print_available_packages, print_available_tests, -}; - -mod canonical_url; -pub mod command_prelude; -pub mod config; -mod counter; -pub mod cpu; -mod dependency_queue; -pub mod diagnostic_server; -pub mod errors; -mod flock; -pub mod graph; -mod hasher; -pub mod hex; -pub mod important_paths; -pub mod interning; -pub mod into_url; -mod into_url_with_base; -pub mod job; -pub mod lev_distance; -mod lockserver; -pub mod machine_message; -pub mod network; -pub mod profile; -mod progress; -mod queue; -pub mod restricted_names; -pub mod rustc; -mod semver_ext; -pub mod to_semver; -pub mod toml; -mod vcs; -mod workspace; - -pub fn elapsed(duration: Duration) -> String { - let secs = duration.as_secs(); - - if secs >= 60 { - format!("{}m {:02}s", secs / 60, secs % 60) - } else { - format!("{}.{:02}s", secs, duration.subsec_nanos() / 10_000_000) - } -} - -pub fn iter_join_onto(mut w: W, iter: I, delim: &str) -> fmt::Result 
-where - W: fmt::Write, - I: IntoIterator, - T: std::fmt::Display, -{ - let mut it = iter.into_iter().peekable(); - while let Some(n) = it.next() { - write!(w, "{}", n)?; - if it.peek().is_some() { - write!(w, "{}", delim)?; - } - } - Ok(()) -} - -pub fn iter_join(iter: I, delim: &str) -> String -where - I: IntoIterator, - T: std::fmt::Display, -{ - let mut s = String::new(); - let _ = iter_join_onto(&mut s, iter, delim); - s -} - -pub fn indented_lines(text: &str) -> String { - text.lines() - .map(|line| { - if line.is_empty() { - String::from("\n") - } else { - format!(" {}\n", line) - } - }) - .collect() -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/network.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/network.rs deleted file mode 100644 index 2a590bc13..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/network.rs +++ /dev/null @@ -1,147 +0,0 @@ -use anyhow::Error; - -use crate::util::errors::{CargoResult, HttpNot200}; -use crate::util::Config; - -pub struct Retry<'a> { - config: &'a Config, - remaining: u32, -} - -impl<'a> Retry<'a> { - pub fn new(config: &'a Config) -> CargoResult> { - Ok(Retry { - config, - remaining: config.net_config()?.retry.unwrap_or(2), - }) - } - - pub fn r#try(&mut self, f: impl FnOnce() -> CargoResult) -> CargoResult> { - match f() { - Err(ref e) if maybe_spurious(e) && self.remaining > 0 => { - let msg = format!( - "spurious network error ({} tries remaining): {}", - self.remaining, - e.root_cause(), - ); - self.config.shell().warn(msg)?; - self.remaining -= 1; - Ok(None) - } - other => other.map(Some), - } - } -} - -fn maybe_spurious(err: &Error) -> bool { - if let Some(git_err) = err.downcast_ref::() { - match git_err.class() { - git2::ErrorClass::Net - | git2::ErrorClass::Os - | git2::ErrorClass::Zlib - | git2::ErrorClass::Http => return true, - _ => (), - } - } - if let Some(curl_err) = err.downcast_ref::() { - if curl_err.is_couldnt_connect() - || curl_err.is_couldnt_resolve_proxy() - || curl_err.is_couldnt_resolve_host() - || curl_err.is_operation_timedout() - || curl_err.is_recv_error() - || curl_err.is_send_error() - || curl_err.is_http2_error() - || curl_err.is_http2_stream_error() - || curl_err.is_ssl_connect_error() - || curl_err.is_partial_file() - { - return true; - } - } - if let Some(not_200) = err.downcast_ref::() { - if 500 <= not_200.code && not_200.code < 600 { - return true; - } - } - false -} - -/// Wrapper method for network call retry logic. -/// -/// Retry counts provided by Config object `net.retry`. Config shell outputs -/// a warning on per retry. -/// -/// Closure must return a `CargoResult`. -/// -/// # Examples -/// -/// ``` -/// # use crate::cargo::util::{CargoResult, Config}; -/// # let download_something = || return Ok(()); -/// # let config = Config::default().unwrap(); -/// use cargo::util::network; -/// let cargo_result = network::with_retry(&config, || download_something()); -/// ``` -pub fn with_retry(config: &Config, mut callback: F) -> CargoResult -where - F: FnMut() -> CargoResult, -{ - let mut retry = Retry::new(config)?; - loop { - if let Some(ret) = retry.r#try(&mut callback)? 
{ - return Ok(ret); - } - } -} - -#[test] -fn with_retry_repeats_the_call_then_works() { - use crate::core::Shell; - - //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry - let error1 = HttpNot200 { - code: 501, - url: "Uri".to_string(), - } - .into(); - let error2 = HttpNot200 { - code: 502, - url: "Uri".to_string(), - } - .into(); - let mut results: Vec> = vec![Ok(()), Err(error1), Err(error2)]; - let config = Config::default().unwrap(); - *config.shell() = Shell::from_write(Box::new(Vec::new())); - let result = with_retry(&config, || results.pop().unwrap()); - assert!(result.is_ok()) -} - -#[test] -fn with_retry_finds_nested_spurious_errors() { - use crate::core::Shell; - - //Error HTTP codes (5xx) are considered maybe_spurious and will prompt retry - //String error messages are not considered spurious - let error1 = anyhow::Error::from(HttpNot200 { - code: 501, - url: "Uri".to_string(), - }); - let error1 = anyhow::Error::from(error1.context("A non-spurious wrapping err")); - let error2 = anyhow::Error::from(HttpNot200 { - code: 502, - url: "Uri".to_string(), - }); - let error2 = anyhow::Error::from(error2.context("A second chained error")); - let mut results: Vec> = vec![Ok(()), Err(error1), Err(error2)]; - let config = Config::default().unwrap(); - *config.shell() = Shell::from_write(Box::new(Vec::new())); - let result = with_retry(&config, || results.pop().unwrap()); - assert!(result.is_ok()) -} - -#[test] -fn curle_http2_stream_is_spurious() { - let code = curl_sys::CURLE_HTTP2_STREAM; - let err = curl::Error::new(code); - assert!(maybe_spurious(&err.into())); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/profile.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/profile.rs deleted file mode 100644 index f172a2965..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/profile.rs +++ /dev/null @@ -1,85 +0,0 @@ -use std::cell::RefCell; -use std::env; -use std::fmt; -use std::io::{stdout, StdoutLock, Write}; -use std::iter::repeat; -use std::mem; -use std::time; - -thread_local!(static PROFILE_STACK: RefCell> = RefCell::new(Vec::new())); -thread_local!(static MESSAGES: RefCell> = RefCell::new(Vec::new())); - -type Message = (usize, u64, String); - -pub struct Profiler { - desc: String, -} - -fn enabled_level() -> Option { - env::var("CARGO_PROFILE").ok().and_then(|s| s.parse().ok()) -} - -pub fn start(desc: T) -> Profiler { - if enabled_level().is_none() { - return Profiler { - desc: String::new(), - }; - } - - PROFILE_STACK.with(|stack| stack.borrow_mut().push(time::Instant::now())); - - Profiler { - desc: desc.to_string(), - } -} - -impl Drop for Profiler { - fn drop(&mut self) { - let enabled = match enabled_level() { - Some(i) => i, - None => return, - }; - - let (start, stack_len) = PROFILE_STACK.with(|stack| { - let mut stack = stack.borrow_mut(); - let start = stack.pop().unwrap(); - (start, stack.len()) - }); - let duration = start.elapsed(); - let duration_ms = duration.as_secs() * 1000 + u64::from(duration.subsec_millis()); - - let msg = (stack_len, duration_ms, mem::take(&mut self.desc)); - MESSAGES.with(|msgs| msgs.borrow_mut().push(msg)); - - if stack_len == 0 { - fn print(lvl: usize, msgs: &[Message], enabled: usize, stdout: &mut StdoutLock<'_>) { - if lvl > enabled { - return; - } - let mut last = 0; - for (i, &(l, time, ref msg)) in msgs.iter().enumerate() { - if l != lvl { - continue; - } - writeln!( - stdout, - "{} {:6}ms - {}", - repeat(" ").take(lvl + 1).collect::(), - time, - msg - ) 
- .expect("printing profiling info to stdout"); - - print(lvl + 1, &msgs[last..i], enabled, stdout); - last = i; - } - } - let stdout = stdout(); - MESSAGES.with(|msgs| { - let mut msgs = msgs.borrow_mut(); - print(0, &msgs, enabled, &mut stdout.lock()); - msgs.clear(); - }); - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/progress.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/progress.rs deleted file mode 100644 index 4eb214674..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/progress.rs +++ /dev/null @@ -1,446 +0,0 @@ -use std::cmp; -use std::env; -use std::time::{Duration, Instant}; - -use crate::core::shell::Verbosity; -use crate::util::config::ProgressWhen; -use crate::util::{CargoResult, Config}; -use cargo_util::is_ci; -use unicode_width::UnicodeWidthChar; - -pub struct Progress<'cfg> { - state: Option>, -} - -pub enum ProgressStyle { - Percentage, - Ratio, -} - -struct Throttle { - first: bool, - last_update: Instant, -} - -struct State<'cfg> { - config: &'cfg Config, - format: Format, - name: String, - done: bool, - throttle: Throttle, - last_line: Option, - fixed_width: Option, -} - -struct Format { - style: ProgressStyle, - max_width: usize, - max_print: usize, -} - -impl<'cfg> Progress<'cfg> { - pub fn with_style(name: &str, style: ProgressStyle, cfg: &'cfg Config) -> Progress<'cfg> { - // report no progress when -q (for quiet) or TERM=dumb are set - // or if running on Continuous Integration service like Travis where the - // output logs get mangled. - let dumb = match env::var("TERM") { - Ok(term) => term == "dumb", - Err(_) => false, - }; - let progress_config = cfg.progress_config(); - match progress_config.when { - ProgressWhen::Always => return Progress::new_priv(name, style, cfg), - ProgressWhen::Never => return Progress { state: None }, - ProgressWhen::Auto => {} - } - if cfg.shell().verbosity() == Verbosity::Quiet || dumb || is_ci() { - return Progress { state: None }; - } - Progress::new_priv(name, style, cfg) - } - - fn new_priv(name: &str, style: ProgressStyle, cfg: &'cfg Config) -> Progress<'cfg> { - let progress_config = cfg.progress_config(); - let width = progress_config - .width - .or_else(|| cfg.shell().err_width().progress_max_width()); - - Progress { - state: width.map(|n| State { - config: cfg, - format: Format { - style, - max_width: n, - // 50 gives some space for text after the progress bar, - // even on narrow (e.g. 80 char) terminals. - max_print: 50, - }, - name: name.to_string(), - done: false, - throttle: Throttle::new(), - last_line: None, - fixed_width: progress_config.width, - }), - } - } - - pub fn disable(&mut self) { - self.state = None; - } - - pub fn is_enabled(&self) -> bool { - self.state.is_some() - } - - pub fn new(name: &str, cfg: &'cfg Config) -> Progress<'cfg> { - Self::with_style(name, ProgressStyle::Percentage, cfg) - } - - pub fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> { - let s = match &mut self.state { - Some(s) => s, - None => return Ok(()), - }; - - // Don't update too often as it can cause excessive performance loss - // just putting stuff onto the terminal. We also want to avoid - // flickering by not drawing anything that goes away too quickly. As a - // result we've got two branches here: - // - // 1. If we haven't drawn anything, we wait for a period of time to - // actually start drawing to the console. This ensures that - // short-lived operations don't flicker on the console. 
Currently - // there's a 500ms delay to when we first draw something. - // 2. If we've drawn something, then we rate limit ourselves to only - // draw to the console every so often. Currently there's a 100ms - // delay between updates. - if !s.throttle.allowed() { - return Ok(()); - } - - s.tick(cur, max, msg) - } - - pub fn tick_now(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> { - match self.state { - Some(ref mut s) => s.tick(cur, max, msg), - None => Ok(()), - } - } - - pub fn update_allowed(&mut self) -> bool { - match &mut self.state { - Some(s) => s.throttle.allowed(), - None => false, - } - } - - pub fn print_now(&mut self, msg: &str) -> CargoResult<()> { - match &mut self.state { - Some(s) => s.print("", msg), - None => Ok(()), - } - } - - pub fn clear(&mut self) { - if let Some(ref mut s) = self.state { - s.clear(); - } - } -} - -impl Throttle { - fn new() -> Throttle { - Throttle { - first: true, - last_update: Instant::now(), - } - } - - fn allowed(&mut self) -> bool { - if self.first { - let delay = Duration::from_millis(500); - if self.last_update.elapsed() < delay { - return false; - } - } else { - let interval = Duration::from_millis(100); - if self.last_update.elapsed() < interval { - return false; - } - } - self.update(); - true - } - - fn update(&mut self) { - self.first = false; - self.last_update = Instant::now(); - } -} - -impl<'cfg> State<'cfg> { - fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> { - if self.done { - return Ok(()); - } - - if max > 0 && cur == max { - self.done = true; - } - - // Write out a pretty header, then the progress bar itself, and then - // return back to the beginning of the line for the next print. - self.try_update_max_width(); - if let Some(pbar) = self.format.progress(cur, max) { - self.print(&pbar, msg)?; - } - Ok(()) - } - - fn print(&mut self, prefix: &str, msg: &str) -> CargoResult<()> { - self.throttle.update(); - self.try_update_max_width(); - - // make sure we have enough room for the header - if self.format.max_width < 15 { - return Ok(()); - } - - let mut line = prefix.to_string(); - self.format.render(&mut line, msg); - while line.len() < self.format.max_width - 15 { - line.push(' '); - } - - // Only update if the line has changed. - if self.config.shell().is_cleared() || self.last_line.as_ref() != Some(&line) { - let mut shell = self.config.shell(); - shell.set_needs_clear(false); - shell.status_header(&self.name)?; - write!(shell.err(), "{}\r", line)?; - self.last_line = Some(line); - shell.set_needs_clear(true); - } - - Ok(()) - } - - fn clear(&mut self) { - // No need to clear if the progress is not currently being displayed. 
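        // (`last_line` doubles as the "a bar is currently drawn" flag: it is
        // set in `print` after a successful draw and reset to `None` below
        // once the line has been erased.)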
- if self.last_line.is_some() && !self.config.shell().is_cleared() { - self.config.shell().err_erase_line(); - self.last_line = None; - } - } - - fn try_update_max_width(&mut self) { - if self.fixed_width.is_none() { - if let Some(n) = self.config.shell().err_width().progress_max_width() { - self.format.max_width = n; - } - } - } -} - -impl Format { - fn progress(&self, cur: usize, max: usize) -> Option { - assert!(cur <= max); - // Render the percentage at the far right and then figure how long the - // progress bar is - let pct = (cur as f64) / (max as f64); - let pct = if !pct.is_finite() { 0.0 } else { pct }; - let stats = match self.style { - ProgressStyle::Percentage => format!(" {:6.02}%", pct * 100.0), - ProgressStyle::Ratio => format!(" {}/{}", cur, max), - }; - let extra_len = stats.len() + 2 /* [ and ] */ + 15 /* status header */; - let display_width = match self.width().checked_sub(extra_len) { - Some(n) => n, - None => return None, - }; - - let mut string = String::with_capacity(self.max_width); - string.push('['); - let hashes = display_width as f64 * pct; - let hashes = hashes as usize; - - // Draw the `===>` - if hashes > 0 { - for _ in 0..hashes - 1 { - string.push('='); - } - if cur == max { - string.push('='); - } else { - string.push('>'); - } - } - - // Draw the empty space we have left to do - for _ in 0..(display_width - hashes) { - string.push(' '); - } - string.push(']'); - string.push_str(&stats); - - Some(string) - } - - fn render(&self, string: &mut String, msg: &str) { - let mut avail_msg_len = self.max_width - string.len() - 15; - let mut ellipsis_pos = 0; - if avail_msg_len <= 3 { - return; - } - for c in msg.chars() { - let display_width = c.width().unwrap_or(0); - if avail_msg_len >= display_width { - avail_msg_len -= display_width; - string.push(c); - if avail_msg_len >= 3 { - ellipsis_pos = string.len(); - } - } else { - string.truncate(ellipsis_pos); - string.push_str("..."); - break; - } - } - } - - #[cfg(test)] - fn progress_status(&self, cur: usize, max: usize, msg: &str) -> Option { - let mut ret = self.progress(cur, max)?; - self.render(&mut ret, msg); - Some(ret) - } - - fn width(&self) -> usize { - cmp::min(self.max_width, self.max_print) - } -} - -impl<'cfg> Drop for State<'cfg> { - fn drop(&mut self) { - self.clear(); - } -} - -#[test] -fn test_progress_status() { - let format = Format { - style: ProgressStyle::Ratio, - max_print: 40, - max_width: 60, - }; - assert_eq!( - format.progress_status(0, 4, ""), - Some("[ ] 0/4".to_string()) - ); - assert_eq!( - format.progress_status(1, 4, ""), - Some("[===> ] 1/4".to_string()) - ); - assert_eq!( - format.progress_status(2, 4, ""), - Some("[========> ] 2/4".to_string()) - ); - assert_eq!( - format.progress_status(3, 4, ""), - Some("[=============> ] 3/4".to_string()) - ); - assert_eq!( - format.progress_status(4, 4, ""), - Some("[===================] 4/4".to_string()) - ); - - assert_eq!( - format.progress_status(3999, 4000, ""), - Some("[===========> ] 3999/4000".to_string()) - ); - assert_eq!( - format.progress_status(4000, 4000, ""), - Some("[=============] 4000/4000".to_string()) - ); - - assert_eq!( - format.progress_status(3, 4, ": short message"), - Some("[=============> ] 3/4: short message".to_string()) - ); - assert_eq!( - format.progress_status(3, 4, ": msg thats just fit"), - Some("[=============> ] 3/4: msg thats just fit".to_string()) - ); - assert_eq!( - format.progress_status(3, 4, ": msg that's just fit"), - Some("[=============> ] 3/4: msg that's just...".to_string()) - ); - - // 
combining diacritics have width zero and thus can fit max_width. - let zalgo_msg = "zฬธฬงฬขฬ—อ‰ฬฬฆอฬฑองอฆอจฬ‘ฬ…ฬŒอฅฬอขaฬขอฌอจฬฝอฏฬ…ฬ‘อฅอ‹ฬฬ‘อซฬ„อขอฬซฬฬชฬคอŽฬฑฬฃอฬญฬžฬ™ฬฑอ™อฬ˜ฬญอšlฬถฬกฬ›ฬฅฬฬฐฬญฬนฬฏฬฏฬžฬชอ‡ฬฑฬฆอ™อ”ฬ˜ฬผอ‡อ“ฬˆอจอ—องฬ“อ’อฆฬ€ฬ‡อฃฬˆอญอŠอ›ฬƒฬ‘อ’ฬฟฬ•อœgฬธฬทฬขฬฉฬปฬปอšฬ อ“ฬžฬฅออฉอŒฬ‘อฅฬŠฬฝอ‹อฬอŒอ›ฬฬ‡ฬ‘อจฬอ…oอ™ฬณฬฃอ”ฬฐฬ ฬœอ•อ•ฬžฬฆฬ™ฬญฬœฬฏฬนฬฌฬปฬ“อ‘อฆอ‹ฬˆฬ‰อŒฬƒอฏฬ€ฬ‚อ อ… ฬธฬกอŽฬฆฬฒฬ–ฬคฬบฬœฬฎฬฑฬฐฬฅอ”ฬฏฬ…ฬอฌฬ‚อจฬ‹ฬƒฬฝฬˆฬฬพฬ”ฬ‡อฃฬšอœอœhฬกอซฬฬ…ฬฟฬฬ€อœา‰ฬ›อ‡ฬญฬนฬฐฬ อ™ฬžแบฝฬถฬ™ฬนฬณฬ–อ‰อŽฬฆอ‚ฬ‹ฬ“อฎฬ”อฌฬฬ€อ‚ฬŒอ‘ฬ’อ†ฬšอœอ  อ“อ“ฬŸอฬฎฬฌฬฬฬฐอ“อŽฬผฬปอฆอฬพฬ”อ’ฬƒฬ“อŸอŸcฬฎฬฆอฬบอˆอšฬฏอ•ฬ„ฬ’อฬ‚อŠฬŠอ—อŠอคอฃฬ€อ˜ฬ•ออžoฬถออšอฬฃฬฎอŒอฆฬฝฬ‘อฉฬ…อฎฬฬฝฬอ—ฬอ‚ฬ…อชอ mฬทฬงอ–ฬปอ”ฬฅฬชฬญอ‰อ‰ฬคฬปอ–ฬฉฬคอ–ฬ˜อฆฬ‚อŒฬ†ฬ‚อฆฬ’อŠอฏอฌอŠฬ‰ฬŒอฌออกeฬตฬนฬฃอฬœฬบฬคฬคฬฏฬซฬนฬ ฬฎอŽอ™ฬฏอšฬฐฬผอ—อฬ€ฬ’อ‚ฬ‰ฬ€ฬšออžsฬตฬฒออ™อ–ฬชอ“อ“ฬบฬฑฬญฬฉฬฃอ–ฬฃอคอคอ‚ฬŽฬˆอ—อ†อจอชฬ†ฬˆอ—ออ "; - assert_eq!( - format.progress_status(3, 4, zalgo_msg), - Some("[=============> ] 3/4".to_string() + zalgo_msg) - ); - - // some non-ASCII ellipsize test - assert_eq!( - format.progress_status(3, 4, "_123456789123456e\u{301}\u{301}8\u{301}90a"), - Some("[=============> ] 3/4_123456789123456e\u{301}\u{301}...".to_string()) - ); - assert_eq!( - format.progress_status(3, 4, "๏ผšๆฏๅ€‹ๆผขๅญ—ไฝ”ๆ“šไบ†ๅ…ฉๅ€‹ๅญ—ๅ…ƒ"), - Some("[=============> ] 3/4๏ผšๆฏๅ€‹ๆผขๅญ—ไฝ”ๆ“šไบ†...".to_string()) - ); - assert_eq!( - // handle breaking at middle of character - format.progress_status(3, 4, "๏ผš-ๆฏๅ€‹ๆผขๅญ—ไฝ”ๆ“šไบ†ๅ…ฉๅ€‹ๅญ—ๅ…ƒ"), - Some("[=============> ] 3/4๏ผš-ๆฏๅ€‹ๆผขๅญ—ไฝ”ๆ“šไบ†...".to_string()) - ); -} - -#[test] -fn test_progress_status_percentage() { - let format = Format { - style: ProgressStyle::Percentage, - max_print: 40, - max_width: 60, - }; - assert_eq!( - format.progress_status(0, 77, ""), - Some("[ ] 0.00%".to_string()) - ); - assert_eq!( - format.progress_status(1, 77, ""), - Some("[ ] 1.30%".to_string()) - ); - assert_eq!( - format.progress_status(76, 77, ""), - Some("[=============> ] 98.70%".to_string()) - ); - assert_eq!( - format.progress_status(77, 77, ""), - Some("[===============] 100.00%".to_string()) - ); -} - -#[test] -fn test_progress_status_too_short() { - let format = Format { - style: ProgressStyle::Percentage, - max_print: 25, - max_width: 25, - }; - assert_eq!( - format.progress_status(1, 1, ""), - Some("[] 100.00%".to_string()) - ); - - let format = Format { - style: ProgressStyle::Percentage, - max_print: 24, - max_width: 24, - }; - assert_eq!(format.progress_status(1, 1, ""), None); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/queue.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/queue.rs deleted file mode 100644 index 66554ea59..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/queue.rs +++ /dev/null @@ -1,75 +0,0 @@ -use std::collections::VecDeque; -use std::sync::{Condvar, Mutex}; -use std::time::Duration; - -/// A simple, threadsafe, queue of items of type `T` -/// -/// This is a sort of channel where any thread can push to a queue and any -/// thread can pop from a queue. -/// -/// This supports both bounded and unbounded operations. `push` will never block, -/// and allows the queue to grow without bounds. `push_bounded` will block if the -/// queue is over capacity, and will resume once there is enough capacity. 
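// A minimal sketch of the contract described above, added for illustration
// only (it is not part of cargo-0.60.0 or of this diff). A bounded std
// channel shows the same semantics: `send` blocks once the buffer is full
// (like `push_bounded`), while `recv_timeout` is the time-limited pop.
use std::sync::mpsc::sync_channel;
use std::thread;
use std::time::Duration;

fn bounded_queue_semantics_demo() {
    // Capacity of 2 plays the role of the queue's `bound`.
    let (tx, rx) = sync_channel::<u32>(2);
    let producer = thread::spawn(move || {
        for i in 0..4 {
            // Blocks whenever two items are already queued.
            tx.send(i).unwrap();
        }
        // Dropping `tx` here lets the consumer's recv_timeout loop end.
    });
    // Time-limited pop, analogous to `Queue::pop(timeout)` returning None.
    while let Ok(item) = rx.recv_timeout(Duration::from_millis(100)) {
        println!("got {}", item);
    }
    producer.join().unwrap();
}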
-pub struct Queue { - state: Mutex>, - popper_cv: Condvar, - bounded_cv: Condvar, - bound: usize, -} - -struct State { - items: VecDeque, -} - -impl Queue { - pub fn new(bound: usize) -> Queue { - Queue { - state: Mutex::new(State { - items: VecDeque::new(), - }), - popper_cv: Condvar::new(), - bounded_cv: Condvar::new(), - bound, - } - } - - pub fn push(&self, item: T) { - self.state.lock().unwrap().items.push_back(item); - self.popper_cv.notify_one(); - } - - /// Pushes an item onto the queue, blocking if the queue is full. - pub fn push_bounded(&self, item: T) { - let locked_state = self.state.lock().unwrap(); - let mut state = self - .bounded_cv - .wait_while(locked_state, |s| s.items.len() >= self.bound) - .unwrap(); - state.items.push_back(item); - self.popper_cv.notify_one(); - } - - pub fn pop(&self, timeout: Duration) -> Option { - let (mut state, result) = self - .popper_cv - .wait_timeout_while(self.state.lock().unwrap(), timeout, |s| s.items.is_empty()) - .unwrap(); - if result.timed_out() { - None - } else { - let value = state.items.pop_front()?; - if state.items.len() < self.bound { - // Assumes threads cannot be canceled. - self.bounded_cv.notify_one(); - } - Some(value) - } - } - - pub fn try_pop_all(&self) -> Vec { - let mut state = self.state.lock().unwrap(); - let result = state.items.drain(..).collect(); - self.bounded_cv.notify_all(); - result - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/restricted_names.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/restricted_names.rs deleted file mode 100644 index 650ae2330..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/restricted_names.rs +++ /dev/null @@ -1,99 +0,0 @@ -//! Helpers for validating and checking names like package and crate names. - -use crate::util::CargoResult; -use anyhow::bail; -use std::path::Path; - -/// Returns `true` if the name contains non-ASCII characters. -pub fn is_non_ascii_name(name: &str) -> bool { - name.chars().any(|ch| ch > '\x7f') -} - -/// A Rust keyword. -pub fn is_keyword(name: &str) -> bool { - // See https://doc.rust-lang.org/reference/keywords.html - [ - "Self", "abstract", "as", "async", "await", "become", "box", "break", "const", "continue", - "crate", "do", "dyn", "else", "enum", "extern", "false", "final", "fn", "for", "if", - "impl", "in", "let", "loop", "macro", "match", "mod", "move", "mut", "override", "priv", - "pub", "ref", "return", "self", "static", "struct", "super", "trait", "true", "try", - "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield", - ] - .contains(&name) -} - -/// These names cannot be used on Windows, even with an extension. -pub fn is_windows_reserved(name: &str) -> bool { - [ - "con", "prn", "aux", "nul", "com1", "com2", "com3", "com4", "com5", "com6", "com7", "com8", - "com9", "lpt1", "lpt2", "lpt3", "lpt4", "lpt5", "lpt6", "lpt7", "lpt8", "lpt9", - ] - .contains(&name.to_ascii_lowercase().as_str()) -} - -/// An artifact with this name will conflict with one of Cargo's build directories. -pub fn is_conflicting_artifact_name(name: &str) -> bool { - ["deps", "examples", "build", "incremental"].contains(&name) -} - -/// Check the base requirements for a package name. -/// -/// This can be used for other things than package names, to enforce some -/// level of sanity. Note that package names have other restrictions -/// elsewhere. `cargo new` has a few restrictions, such as checking for -/// reserved names. crates.io has even more restrictions. 
-pub fn validate_package_name(name: &str, what: &str, help: &str) -> CargoResult<()> { - let mut chars = name.chars(); - if let Some(ch) = chars.next() { - if ch.is_digit(10) { - // A specific error for a potentially common case. - bail!( - "the name `{}` cannot be used as a {}, \ - the name cannot start with a digit{}", - name, - what, - help - ); - } - if !(unicode_xid::UnicodeXID::is_xid_start(ch) || ch == '_') { - bail!( - "invalid character `{}` in {}: `{}`, \ - the first character must be a Unicode XID start character \ - (most letters or `_`){}", - ch, - what, - name, - help - ); - } - } - for ch in chars { - if !(unicode_xid::UnicodeXID::is_xid_continue(ch) || ch == '-') { - bail!( - "invalid character `{}` in {}: `{}`, \ - characters must be Unicode XID characters \ - (numbers, `-`, `_`, or most letters){}", - ch, - what, - name, - help - ); - } - } - Ok(()) -} - -/// Check the entire path for names reserved in Windows. -pub fn is_windows_reserved_path(path: &Path) -> bool { - path.iter() - .filter_map(|component| component.to_str()) - .any(|component| { - let stem = component.split('.').next().unwrap(); - is_windows_reserved(stem) - }) -} - -/// Returns `true` if the name contains any glob pattern wildcards. -pub fn is_glob_pattern>(name: T) -> bool { - name.as_ref().contains(&['*', '?', '[', ']'][..]) -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/rustc.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/rustc.rs deleted file mode 100644 index 33f96c155..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/rustc.rs +++ /dev/null @@ -1,359 +0,0 @@ -use std::collections::hash_map::HashMap; -use std::env; -use std::hash::{Hash, Hasher}; -use std::path::{Path, PathBuf}; -use std::sync::Mutex; - -use anyhow::Context as _; -use cargo_util::{paths, ProcessBuilder, ProcessError}; -use log::{debug, info, warn}; -use serde::{Deserialize, Serialize}; - -use crate::util::interning::InternedString; -use crate::util::{profile, CargoResult, StableHasher}; - -/// Information on the `rustc` executable -#[derive(Debug)] -pub struct Rustc { - /// The location of the exe - pub path: PathBuf, - /// An optional program that will be passed the path of the rust exe as its first argument, and - /// rustc args following this. - pub wrapper: Option, - /// An optional wrapper to be used in addition to `rustc.wrapper` for workspace crates - pub workspace_wrapper: Option, - /// Verbose version information (the output of `rustc -vV`) - pub verbose_version: String, - /// The rustc version (`1.23.4-beta.2`), this comes from verbose_version. - pub version: semver::Version, - /// The host triple (arch-platform-OS), this comes from verbose_version. - pub host: InternedString, - cache: Mutex, -} - -impl Rustc { - /// Runs the compiler at `path` to learn various pieces of information about - /// it, with an optional wrapper. - /// - /// If successful this function returns a description of the compiler along - /// with a list of its capabilities. 
- pub fn new( - path: PathBuf, - wrapper: Option, - workspace_wrapper: Option, - rustup_rustc: &Path, - cache_location: Option, - ) -> CargoResult { - let _p = profile::start("Rustc::new"); - - let mut cache = Cache::load( - wrapper.as_deref(), - workspace_wrapper.as_deref(), - &path, - rustup_rustc, - cache_location, - ); - - let mut cmd = ProcessBuilder::new(&path); - cmd.arg("-vV"); - let verbose_version = cache.cached_output(&cmd, 0)?.0; - - let extract = |field: &str| -> CargoResult<&str> { - verbose_version - .lines() - .find(|l| l.starts_with(field)) - .map(|l| &l[field.len()..]) - .ok_or_else(|| { - anyhow::format_err!( - "`rustc -vV` didn't have a line for `{}`, got:\n{}", - field.trim(), - verbose_version - ) - }) - }; - - let host = InternedString::new(extract("host: ")?); - let version = semver::Version::parse(extract("release: ")?).with_context(|| { - format!( - "rustc version does not appear to be a valid semver version, from:\n{}", - verbose_version - ) - })?; - - Ok(Rustc { - path, - wrapper, - workspace_wrapper, - verbose_version, - version, - host, - cache: Mutex::new(cache), - }) - } - - /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`. - pub fn process(&self) -> ProcessBuilder { - ProcessBuilder::new(self.path.as_path()).wrapped(self.wrapper.as_ref()) - } - - /// Gets a process builder set up to use the found rustc version, with a wrapper if `Some`. - pub fn workspace_process(&self) -> ProcessBuilder { - ProcessBuilder::new(self.path.as_path()) - .wrapped(self.workspace_wrapper.as_ref()) - .wrapped(self.wrapper.as_ref()) - } - - pub fn process_no_wrapper(&self) -> ProcessBuilder { - ProcessBuilder::new(&self.path) - } - - /// Gets the output for the given command. - /// - /// This will return the cached value if available, otherwise it will run - /// the command and cache the output. - /// - /// `extra_fingerprint` is extra data to include in the cache fingerprint. - /// Use this if there is other information about the environment that may - /// affect the output that is not part of `cmd`. - /// - /// Returns a tuple of strings `(stdout, stderr)`. - pub fn cached_output( - &self, - cmd: &ProcessBuilder, - extra_fingerprint: u64, - ) -> CargoResult<(String, String)> { - self.cache - .lock() - .unwrap() - .cached_output(cmd, extra_fingerprint) - } -} - -/// It is a well known fact that `rustc` is not the fastest compiler in the -/// world. What is less known is that even `rustc --version --verbose` takes -/// about a hundred milliseconds! Because we need compiler version info even -/// for no-op builds, we cache it here, based on compiler's mtime and rustup's -/// current toolchain. 
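// A minimal sketch of the caching key described above, added for illustration
// only (not part of cargo-0.60.0 or of this diff). The function name
// `toy_rustc_fingerprint` is made up; it only shows the idea of hashing the
// compiler path, its mtime, and the rustup toolchain so that cached
// `rustc -vV` output is reused only while all three are unchanged.
use std::collections::hash_map::DefaultHasher;
use std::env;
use std::fs;
use std::hash::{Hash, Hasher};
use std::io;
use std::path::Path;

fn toy_rustc_fingerprint(rustc: &Path) -> io::Result<u64> {
    let mut hasher = DefaultHasher::new();
    rustc.hash(&mut hasher);
    // A `rustup update` changes the real binary's mtime even if the shim's
    // path stays the same.
    fs::metadata(rustc)?.modified()?.hash(&mut hasher);
    // The selected toolchain can change without the binary path changing.
    if let Ok(toolchain) = env::var("RUSTUP_TOOLCHAIN") {
        toolchain.hash(&mut hasher);
    }
    Ok(hasher.finish())
}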
-/// -/// -/// -#[derive(Debug)] -struct Cache { - cache_location: Option, - dirty: bool, - data: CacheData, -} - -#[derive(Serialize, Deserialize, Debug, Default)] -struct CacheData { - rustc_fingerprint: u64, - outputs: HashMap, - successes: HashMap, -} - -#[derive(Serialize, Deserialize, Debug)] -struct Output { - success: bool, - status: String, - code: Option, - stdout: String, - stderr: String, -} - -impl Cache { - fn load( - wrapper: Option<&Path>, - workspace_wrapper: Option<&Path>, - rustc: &Path, - rustup_rustc: &Path, - cache_location: Option, - ) -> Cache { - match ( - cache_location, - rustc_fingerprint(wrapper, workspace_wrapper, rustc, rustup_rustc), - ) { - (Some(cache_location), Ok(rustc_fingerprint)) => { - let empty = CacheData { - rustc_fingerprint, - outputs: HashMap::new(), - successes: HashMap::new(), - }; - let mut dirty = true; - let data = match read(&cache_location) { - Ok(data) => { - if data.rustc_fingerprint == rustc_fingerprint { - debug!("reusing existing rustc info cache"); - dirty = false; - data - } else { - debug!("different compiler, creating new rustc info cache"); - empty - } - } - Err(e) => { - debug!("failed to read rustc info cache: {}", e); - empty - } - }; - return Cache { - cache_location: Some(cache_location), - dirty, - data, - }; - - fn read(path: &Path) -> CargoResult { - let json = paths::read(path)?; - Ok(serde_json::from_str(&json)?) - } - } - (_, fingerprint) => { - if let Err(e) = fingerprint { - warn!("failed to calculate rustc fingerprint: {}", e); - } - debug!("rustc info cache disabled"); - Cache { - cache_location: None, - dirty: false, - data: CacheData::default(), - } - } - } - } - - fn cached_output( - &mut self, - cmd: &ProcessBuilder, - extra_fingerprint: u64, - ) -> CargoResult<(String, String)> { - let key = process_fingerprint(cmd, extra_fingerprint); - if self.data.outputs.contains_key(&key) { - debug!("rustc info cache hit"); - } else { - debug!("rustc info cache miss"); - debug!("running {}", cmd); - let output = cmd - .build_command() - .output() - .with_context(|| format!("could not execute process {} (never executed)", cmd))?; - let stdout = String::from_utf8(output.stdout) - .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes())) - .with_context(|| format!("`{}` didn't return utf8 output", cmd))?; - let stderr = String::from_utf8(output.stderr) - .map_err(|e| anyhow::anyhow!("{}: {:?}", e, e.as_bytes())) - .with_context(|| format!("`{}` didn't return utf8 output", cmd))?; - self.data.outputs.insert( - key, - Output { - success: output.status.success(), - status: if output.status.success() { - String::new() - } else { - cargo_util::exit_status_to_string(output.status) - }, - code: output.status.code(), - stdout, - stderr, - }, - ); - self.dirty = true; - } - let output = &self.data.outputs[&key]; - if output.success { - Ok((output.stdout.clone(), output.stderr.clone())) - } else { - Err(ProcessError::new_raw( - &format!("process didn't exit successfully: {}", cmd), - output.code, - &output.status, - Some(output.stdout.as_ref()), - Some(output.stderr.as_ref()), - ) - .into()) - } - } -} - -impl Drop for Cache { - fn drop(&mut self) { - if !self.dirty { - return; - } - if let Some(ref path) = self.cache_location { - let json = serde_json::to_string(&self.data).unwrap(); - match paths::write(path, json.as_bytes()) { - Ok(()) => info!("updated rustc info cache"), - Err(e) => warn!("failed to update rustc info cache: {}", e), - } - } - } -} - -fn rustc_fingerprint( - wrapper: Option<&Path>, - workspace_wrapper: 
Option<&Path>, - rustc: &Path, - rustup_rustc: &Path, -) -> CargoResult { - let mut hasher = StableHasher::new(); - - let hash_exe = |hasher: &mut _, path| -> CargoResult<()> { - let path = paths::resolve_executable(path)?; - path.hash(hasher); - - paths::mtime(&path)?.hash(hasher); - Ok(()) - }; - - hash_exe(&mut hasher, rustc)?; - if let Some(wrapper) = wrapper { - hash_exe(&mut hasher, wrapper)?; - } - if let Some(workspace_wrapper) = workspace_wrapper { - hash_exe(&mut hasher, workspace_wrapper)?; - } - - // Rustup can change the effective compiler without touching - // the `rustc` binary, so we try to account for this here. - // If we see rustup's env vars, we mix them into the fingerprint, - // but we also mix in the mtime of the actual compiler (and not - // the rustup shim at `~/.cargo/bin/rustup`), because `RUSTUP_TOOLCHAIN` - // could be just `stable-x86_64-unknown-linux-gnu`, i.e, it could - // not mention the version of Rust at all, which changes after - // `rustup update`. - // - // If we don't see rustup env vars, but it looks like the compiler - // is managed by rustup, we conservatively bail out. - let maybe_rustup = rustup_rustc == rustc; - match ( - maybe_rustup, - env::var("RUSTUP_HOME"), - env::var("RUSTUP_TOOLCHAIN"), - ) { - (_, Ok(rustup_home), Ok(rustup_toolchain)) => { - debug!("adding rustup info to rustc fingerprint"); - rustup_toolchain.hash(&mut hasher); - rustup_home.hash(&mut hasher); - let real_rustc = Path::new(&rustup_home) - .join("toolchains") - .join(rustup_toolchain) - .join("bin") - .join("rustc") - .with_extension(env::consts::EXE_EXTENSION); - paths::mtime(&real_rustc)?.hash(&mut hasher); - } - (true, _, _) => anyhow::bail!("probably rustup rustc, but without rustup's env vars"), - _ => (), - } - - Ok(hasher.finish()) -} - -fn process_fingerprint(cmd: &ProcessBuilder, extra_fingerprint: u64) -> u64 { - let mut hasher = StableHasher::new(); - extra_fingerprint.hash(&mut hasher); - cmd.get_args().hash(&mut hasher); - let mut env = cmd.get_envs().iter().collect::>(); - env.sort_unstable(); - env.hash(&mut hasher); - hasher.finish() -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/semver_ext.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/semver_ext.rs deleted file mode 100644 index de6d68e16..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/semver_ext.rs +++ /dev/null @@ -1,146 +0,0 @@ -use semver::{Comparator, Op, Version, VersionReq}; -use std::fmt::{self, Display}; - -#[derive(PartialEq, Eq, Hash, Clone, Debug)] -pub enum OptVersionReq { - Any, - Req(VersionReq), - /// The exact locked version and the original version requirement. 
- Locked(Version, VersionReq), -} - -pub trait VersionExt { - fn is_prerelease(&self) -> bool; -} - -pub trait VersionReqExt { - fn exact(version: &Version) -> Self; -} - -impl VersionExt for Version { - fn is_prerelease(&self) -> bool { - !self.pre.is_empty() - } -} - -impl VersionReqExt for VersionReq { - fn exact(version: &Version) -> Self { - VersionReq { - comparators: vec![Comparator { - op: Op::Exact, - major: version.major, - minor: Some(version.minor), - patch: Some(version.patch), - pre: version.pre.clone(), - }], - } - } -} - -impl OptVersionReq { - pub fn exact(version: &Version) -> Self { - OptVersionReq::Req(VersionReq::exact(version)) - } - - pub fn is_exact(&self) -> bool { - match self { - OptVersionReq::Any => false, - OptVersionReq::Req(req) => { - req.comparators.len() == 1 && { - let cmp = &req.comparators[0]; - cmp.op == Op::Exact && cmp.minor.is_some() && cmp.patch.is_some() - } - } - OptVersionReq::Locked(..) => true, - } - } - - pub fn lock_to(&mut self, version: &Version) { - assert!(self.matches(version), "cannot lock {} to {}", self, version); - use OptVersionReq::*; - let version = version.clone(); - *self = match self { - Any => Locked(version, VersionReq::STAR), - Req(req) => Locked(version, req.clone()), - Locked(_, req) => Locked(version, req.clone()), - }; - } - - pub fn is_locked(&self) -> bool { - matches!(self, OptVersionReq::Locked(..)) - } - - /// Gets the version to which this req is locked, if any. - pub fn locked_version(&self) -> Option<&Version> { - match self { - OptVersionReq::Locked(version, _) => Some(version), - _ => None, - } - } - - pub fn matches(&self, version: &Version) -> bool { - match self { - OptVersionReq::Any => true, - OptVersionReq::Req(req) => req.matches(version), - OptVersionReq::Locked(v, _) => { - v.major == version.major - && v.minor == version.minor - && v.patch == version.patch - && v.pre == version.pre - } - } - } -} - -impl Display for OptVersionReq { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - OptVersionReq::Any => f.write_str("*"), - OptVersionReq::Req(req) => Display::fmt(req, f), - OptVersionReq::Locked(_, req) => Display::fmt(req, f), - } - } -} - -impl From for OptVersionReq { - fn from(req: VersionReq) -> Self { - OptVersionReq::Req(req) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn locked_has_the_same_with_exact() { - fn test_versions(target_ver: &str, vers: &[&str]) { - let ver = Version::parse(target_ver).unwrap(); - let exact = OptVersionReq::exact(&ver); - let mut locked = exact.clone(); - locked.lock_to(&ver); - for v in vers { - let v = Version::parse(v).unwrap(); - assert_eq!(exact.matches(&v), locked.matches(&v)); - } - } - - test_versions( - "1.0.0", - &["1.0.0", "1.0.1", "0.9.9", "0.10.0", "0.1.0", "1.0.0-pre"], - ); - test_versions("0.9.0", &["0.9.0", "0.9.1", "1.9.0", "0.0.9", "0.9.0-pre"]); - test_versions("0.0.2", &["0.0.2", "0.0.1", "0.0.3", "0.0.2-pre"]); - test_versions( - "0.1.0-beta2.a", - &[ - "0.1.0-beta2.a", - "0.9.1", - "0.1.0", - "0.1.1-beta2.a", - "0.1.0-beta2", - ], - ); - test_versions("0.1.0+meta", &["0.1.0", "0.1.0+meta", "0.1.0+any"]); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/to_semver.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/to_semver.rs deleted file mode 100644 index 25da9dfb9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/to_semver.rs +++ /dev/null @@ -1,33 +0,0 @@ -use crate::util::errors::CargoResult; -use semver::Version; - -pub trait 
ToSemver { - fn to_semver(self) -> CargoResult; -} - -impl ToSemver for Version { - fn to_semver(self) -> CargoResult { - Ok(self) - } -} - -impl<'a> ToSemver for &'a str { - fn to_semver(self) -> CargoResult { - match Version::parse(self.trim()) { - Ok(v) => Ok(v), - Err(..) => Err(anyhow::format_err!("cannot parse '{}' as a semver", self)), - } - } -} - -impl<'a> ToSemver for &'a String { - fn to_semver(self) -> CargoResult { - (**self).to_semver() - } -} - -impl<'a> ToSemver for &'a Version { - fn to_semver(self) -> CargoResult { - Ok(self.clone()) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/toml/mod.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/toml/mod.rs deleted file mode 100644 index 39835c752..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/toml/mod.rs +++ /dev/null @@ -1,2031 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; -use std::fmt; -use std::marker::PhantomData; -use std::path::{Path, PathBuf}; -use std::rc::Rc; -use std::str; - -use anyhow::{anyhow, bail, Context as _}; -use cargo_platform::Platform; -use cargo_util::paths; -use log::{debug, trace}; -use semver::{self, VersionReq}; -use serde::de; -use serde::ser; -use serde::{Deserialize, Serialize}; -use url::Url; - -use crate::core::compiler::{CompileKind, CompileTarget}; -use crate::core::dependency::DepKind; -use crate::core::manifest::{ManifestMetadata, TargetSourcePath, Warnings}; -use crate::core::resolver::ResolveBehavior; -use crate::core::{Dependency, Manifest, PackageId, Summary, Target}; -use crate::core::{Edition, EitherManifest, Feature, Features, VirtualManifest, Workspace}; -use crate::core::{GitReference, PackageIdSpec, SourceId, WorkspaceConfig, WorkspaceRootConfig}; -use crate::sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY}; -use crate::util::errors::{CargoResult, ManifestError}; -use crate::util::interning::InternedString; -use crate::util::{ - self, config::ConfigRelativePath, validate_package_name, Config, IntoUrl, VersionReqExt, -}; - -mod targets; -use self::targets::targets; - -/// Loads a `Cargo.toml` from a file on disk. -/// -/// This could result in a real or virtual manifest being returned. -/// -/// A list of nested paths is also returned, one for each path dependency -/// within the manifest. For virtual manifests, these paths can only -/// come from patched or replaced dependencies. These paths are not -/// canonicalized. -pub fn read_manifest( - path: &Path, - source_id: SourceId, - config: &Config, -) -> Result<(EitherManifest, Vec), ManifestError> { - trace!( - "read_manifest; path={}; source-id={}", - path.display(), - source_id - ); - let contents = paths::read(path).map_err(|err| ManifestError::new(err, path.into()))?; - - read_manifest_from_str(&contents, path, source_id, config) - .with_context(|| format!("failed to parse manifest at `{}`", path.display())) - .map_err(|err| ManifestError::new(err, path.into())) -} - -/// Parse an already-loaded `Cargo.toml` as a Cargo manifest. -/// -/// This could result in a real or virtual manifest being returned. -/// -/// A list of nested paths is also returned, one for each path dependency -/// within the manifest. For virtual manifests, these paths can only -/// come from patched or replaced dependencies. These paths are not -/// canonicalized. 
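// A minimal sketch of the real-vs-virtual distinction described above, added
// for illustration only (not part of cargo-0.60.0 or of this diff). It
// assumes the `toml` crate as a dependency: a manifest without a `[package]`
// (or legacy `[project]`) table is treated as a virtual, workspace-only
// manifest.
fn is_virtual_manifest(contents: &str) -> Result<bool, toml::de::Error> {
    let value: toml::Value = contents.parse()?;
    let has_package = value.get("package").is_some() || value.get("project").is_some();
    Ok(!has_package)
}
// e.g. `is_virtual_manifest("[workspace]\nmembers = []\n")` returns Ok(true).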
-pub fn read_manifest_from_str( - contents: &str, - manifest_file: &Path, - source_id: SourceId, - config: &Config, -) -> CargoResult<(EitherManifest, Vec)> { - let package_root = manifest_file.parent().unwrap(); - - let toml = { - let pretty_filename = manifest_file - .strip_prefix(config.cwd()) - .unwrap_or(manifest_file); - parse(contents, pretty_filename, config)? - }; - - // Provide a helpful error message for a common user error. - if let Some(package) = toml.get("package").or_else(|| toml.get("project")) { - if let Some(feats) = package.get("cargo-features") { - bail!( - "cargo-features = {} was found in the wrong location: it \ - should be set at the top of Cargo.toml before any tables", - toml::to_string(feats).unwrap() - ); - } - } - - let mut unused = BTreeSet::new(); - let manifest: TomlManifest = serde_ignored::deserialize(toml, |path| { - let mut key = String::new(); - stringify(&mut key, &path); - unused.insert(key); - })?; - let add_unused = |warnings: &mut Warnings| { - for key in unused { - warnings.add_warning(format!("unused manifest key: {}", key)); - if key == "profiles.debug" { - warnings.add_warning("use `[profile.dev]` to configure debug builds".to_string()); - } - } - }; - - let manifest = Rc::new(manifest); - return if manifest.project.is_some() || manifest.package.is_some() { - let (mut manifest, paths) = - TomlManifest::to_real_manifest(&manifest, source_id, package_root, config)?; - add_unused(manifest.warnings_mut()); - if manifest.targets().iter().all(|t| t.is_custom_build()) { - bail!( - "no targets specified in the manifest\n\ - either src/lib.rs, src/main.rs, a [lib] section, or \ - [[bin]] section must be present" - ) - } - Ok((EitherManifest::Real(manifest), paths)) - } else { - let (mut m, paths) = - TomlManifest::to_virtual_manifest(&manifest, source_id, package_root, config)?; - add_unused(m.warnings_mut()); - Ok((EitherManifest::Virtual(m), paths)) - }; - - fn stringify(dst: &mut String, path: &serde_ignored::Path<'_>) { - use serde_ignored::Path; - - match *path { - Path::Root => {} - Path::Seq { parent, index } => { - stringify(dst, parent); - if !dst.is_empty() { - dst.push('.'); - } - dst.push_str(&index.to_string()); - } - Path::Map { parent, ref key } => { - stringify(dst, parent); - if !dst.is_empty() { - dst.push('.'); - } - dst.push_str(key); - } - Path::Some { parent } - | Path::NewtypeVariant { parent } - | Path::NewtypeStruct { parent } => stringify(dst, parent), - } - } -} - -/// Attempts to parse a string into a [`toml::Value`]. This is not specific to any -/// particular kind of TOML file. -/// -/// The purpose of this wrapper is to detect invalid TOML which was previously -/// accepted and display a warning to the user in that case. The `file` and `config` -/// parameters are only used by this fallback path. -pub fn parse(toml: &str, _file: &Path, _config: &Config) -> CargoResult { - // At the moment, no compatibility checks are needed. - toml.parse() - .map_err(|e| anyhow::Error::from(e).context("could not parse input as TOML")) -} - -type TomlLibTarget = TomlTarget; -type TomlBinTarget = TomlTarget; -type TomlExampleTarget = TomlTarget; -type TomlTestTarget = TomlTarget; -type TomlBenchTarget = TomlTarget; - -#[derive(Clone, Debug, Serialize)] -#[serde(untagged)] -pub enum TomlDependency

<P = String> {
-    /// In the simple format, only a version is specified, eg.
-    /// `package = "<version>"`
-    Simple(String),
-    /// The simple format is equivalent to a detailed dependency
-    /// specifying only a version, eg.
-    /// `package = { version = "<version>" }`
-    Detailed(DetailedTomlDependency<P>),
-}
-
-impl<'de, P: Deserialize<'de>> de::Deserialize<'de> for TomlDependency<P> {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: de::Deserializer<'de>,
-    {
-        struct TomlDependencyVisitor<P>(PhantomData<P>);
-
-        impl<'de, P: Deserialize<'de>> de::Visitor<'de> for TomlDependencyVisitor<P> {
-            type Value = TomlDependency<P>;
-
-            fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
-                formatter.write_str(
-                    "a version string like \"0.9.8\" or a \
-                     detailed dependency like { version = \"0.9.8\" }",
-                )
-            }
-
-            fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
-            where
-                E: de::Error,
-            {
-                Ok(TomlDependency::Simple(s.to_owned()))
-            }
-
-            fn visit_map<V>(self, map: V) -> Result<Self::Value, V::Error>
-            where
-                V: de::MapAccess<'de>,
-            {
-                let mvd = de::value::MapAccessDeserializer::new(map);
-                DetailedTomlDependency::deserialize(mvd).map(TomlDependency::Detailed)
-            }
-        }
-
-        deserializer.deserialize_any(TomlDependencyVisitor(PhantomData))
-    }
-}
-
-pub trait ResolveToPath {
-    fn resolve(&self, config: &Config) -> PathBuf;
-}
-
-impl ResolveToPath for String {
-    fn resolve(&self, _: &Config) -> PathBuf {
-        self.into()
-    }
-}
-
-impl ResolveToPath for ConfigRelativePath {
-    fn resolve(&self, c: &Config) -> PathBuf {
-        self.resolve_path(c)
-    }
-}
-
-#[derive(Deserialize, Serialize, Clone, Debug)]
-#[serde(rename_all = "kebab-case")]
-pub struct DetailedTomlDependency<P = String> {
-    version: Option<String>,
-    registry: Option<String>,
-    /// The URL of the `registry` field.
-    /// This is an internal implementation detail. When Cargo creates a
-    /// package, it replaces `registry` with `registry-index` so that the
-    /// manifest contains the correct URL. All users won't have the same
-    /// registry names configured, so Cargo can't rely on just the name for
-    /// crates published by other users.
-    registry_index: Option<String>,
-    // `path` is relative to the file it appears in. If that's a `Cargo.toml`, it'll be relative to
-    // that TOML file, and if it's a `.cargo/config` file, it'll be relative to that file.
-    path: Option<P>,
-    git: Option<String>,
-    branch: Option<String>,
-    tag: Option<String>,
-    rev: Option<String>,
-    features: Option<Vec<String>>,
-    optional: Option<bool>,
-    default_features: Option<bool>,
-    #[serde(rename = "default_features")]
-    default_features2: Option<bool>,
-    package: Option<String>,
-    public: Option<bool>,
-}
-
-// Explicit implementation so we avoid pulling in P: Default
-impl<P> Default for DetailedTomlDependency<P>
{ - fn default() -> Self { - Self { - version: Default::default(), - registry: Default::default(), - registry_index: Default::default(), - path: Default::default(), - git: Default::default(), - branch: Default::default(), - tag: Default::default(), - rev: Default::default(), - features: Default::default(), - optional: Default::default(), - default_features: Default::default(), - default_features2: Default::default(), - package: Default::default(), - public: Default::default(), - } - } -} - -/// This type is used to deserialize `Cargo.toml` files. -#[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct TomlManifest { - cargo_features: Option>, - package: Option>, - project: Option>, - profile: Option, - lib: Option, - bin: Option>, - example: Option>, - test: Option>, - bench: Option>, - dependencies: Option>, - dev_dependencies: Option>, - #[serde(rename = "dev_dependencies")] - dev_dependencies2: Option>, - build_dependencies: Option>, - #[serde(rename = "build_dependencies")] - build_dependencies2: Option>, - features: Option>>, - target: Option>, - replace: Option>, - patch: Option>>, - workspace: Option, - badges: Option>>, -} - -#[derive(Deserialize, Serialize, Clone, Debug, Default)] -pub struct TomlProfiles(BTreeMap); - -impl TomlProfiles { - pub fn get_all(&self) -> &BTreeMap { - &self.0 - } - - pub fn get(&self, name: &str) -> Option<&TomlProfile> { - self.0.get(name) - } - - pub fn validate(&self, features: &Features, warnings: &mut Vec) -> CargoResult<()> { - for (name, profile) in &self.0 { - profile.validate(name, features, warnings)?; - } - Ok(()) - } -} - -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct TomlOptLevel(pub String); - -impl<'de> de::Deserialize<'de> for TomlOptLevel { - fn deserialize(d: D) -> Result - where - D: de::Deserializer<'de>, - { - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = TomlOptLevel; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("an optimization level") - } - - fn visit_i64(self, value: i64) -> Result - where - E: de::Error, - { - Ok(TomlOptLevel(value.to_string())) - } - - fn visit_str(self, value: &str) -> Result - where - E: de::Error, - { - if value == "s" || value == "z" { - Ok(TomlOptLevel(value.to_string())) - } else { - Err(E::custom(format!( - "must be `0`, `1`, `2`, `3`, `s` or `z`, \ - but found the string: \"{}\"", - value - ))) - } - } - } - - d.deserialize_any(Visitor) - } -} - -impl ser::Serialize for TomlOptLevel { - fn serialize(&self, serializer: S) -> Result - where - S: ser::Serializer, - { - match self.0.parse::() { - Ok(n) => n.serialize(serializer), - Err(_) => self.0.serialize(serializer), - } - } -} - -#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(untagged, expecting = "expected a boolean or an integer")] -pub enum U32OrBool { - U32(u32), - Bool(bool), -} - -#[derive(Deserialize, Serialize, Clone, Debug, Default, Eq, PartialEq)] -#[serde(default, rename_all = "kebab-case")] -pub struct TomlProfile { - pub opt_level: Option, - pub lto: Option, - pub codegen_backend: Option, - pub codegen_units: Option, - pub debug: Option, - pub split_debuginfo: Option, - pub debug_assertions: Option, - pub rpath: Option, - pub panic: Option, - pub overflow_checks: Option, - pub incremental: Option, - pub dir_name: Option, - pub inherits: Option, - pub strip: Option, - // These two fields must be last because they are sub-tables, and TOML - // requires all non-tables to be listed first. 
- pub package: Option>, - pub build_override: Option>, -} - -#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] -pub enum ProfilePackageSpec { - Spec(PackageIdSpec), - All, -} - -impl ser::Serialize for ProfilePackageSpec { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - match *self { - ProfilePackageSpec::Spec(ref spec) => spec.serialize(s), - ProfilePackageSpec::All => "*".serialize(s), - } - } -} - -impl<'de> de::Deserialize<'de> for ProfilePackageSpec { - fn deserialize(d: D) -> Result - where - D: de::Deserializer<'de>, - { - let string = String::deserialize(d)?; - if string == "*" { - Ok(ProfilePackageSpec::All) - } else { - PackageIdSpec::parse(&string) - .map_err(de::Error::custom) - .map(ProfilePackageSpec::Spec) - } - } -} - -impl TomlProfile { - pub fn validate( - &self, - name: &str, - features: &Features, - warnings: &mut Vec, - ) -> CargoResult<()> { - if let Some(ref profile) = self.build_override { - features.require(Feature::profile_overrides())?; - profile.validate_override("build-override", features)?; - } - if let Some(ref packages) = self.package { - features.require(Feature::profile_overrides())?; - for profile in packages.values() { - profile.validate_override("package", features)?; - } - } - - // Feature gate definition of named profiles - match name { - "dev" | "release" | "bench" | "test" | "doc" => {} - _ => { - features.require(Feature::named_profiles())?; - } - } - - // Profile name validation - Self::validate_name(name)?; - - // Feature gate on uses of keys related to named profiles - if self.inherits.is_some() { - features.require(Feature::named_profiles())?; - } - - if let Some(dir_name) = self.dir_name { - // This is disabled for now, as we would like to stabilize named - // profiles without this, and then decide in the future if it is - // needed. This helps simplify the UI a little. - bail!( - "dir-name=\"{}\" in profile `{}` is not currently allowed, \ - directory names are tied to the profile name for custom profiles", - dir_name, - name - ); - } - - // `inherits` validation - if matches!(self.inherits.map(|s| s.as_str()), Some("debug")) { - bail!( - "profile.{}.inherits=\"debug\" should be profile.{}.inherits=\"dev\"", - name, - name - ); - } - - match name { - "doc" => { - warnings.push("profile `doc` is deprecated and has no effect".to_string()); - } - "test" | "bench" => { - if self.panic.is_some() { - warnings.push(format!("`panic` setting is ignored for `{}` profile", name)) - } - } - _ => {} - } - - if let Some(panic) = &self.panic { - if panic != "unwind" && panic != "abort" { - bail!( - "`panic` setting of `{}` is not a valid setting, \ - must be `unwind` or `abort`", - panic - ); - } - } - - if let Some(codegen_backend) = &self.codegen_backend { - features.require(Feature::codegen_backend())?; - if codegen_backend.contains(|c: char| !c.is_ascii_alphanumeric() && c != '_') { - bail!( - "`profile.{}.codegen-backend` setting of `{}` is not a valid backend name.", - name, - codegen_backend, - ); - } - } - - Ok(()) - } - - /// Validate dir-names and profile names according to RFC 2678. 
- pub fn validate_name(name: &str) -> CargoResult<()> { - if let Some(ch) = name - .chars() - .find(|ch| !ch.is_alphanumeric() && *ch != '_' && *ch != '-') - { - bail!( - "invalid character `{}` in profile name `{}`\n\ - Allowed characters are letters, numbers, underscore, and hyphen.", - ch, - name - ); - } - - const SEE_DOCS: &str = "See https://doc.rust-lang.org/cargo/reference/profiles.html \ - for more on configuring profiles."; - - let lower_name = name.to_lowercase(); - if lower_name == "debug" { - bail!( - "profile name `{}` is reserved\n\ - To configure the default development profile, use the name `dev` \ - as in [profile.dev]\n\ - {}", - name, - SEE_DOCS - ); - } - if lower_name == "build-override" { - bail!( - "profile name `{}` is reserved\n\ - To configure build dependency settings, use [profile.dev.build-override] \ - and [profile.release.build-override]\n\ - {}", - name, - SEE_DOCS - ); - } - - // These are some arbitrary reservations. We have no plans to use - // these, but it seems safer to reserve a few just in case we want to - // add more built-in profiles in the future. We can also uses special - // syntax like cargo:foo if needed. But it is unlikely these will ever - // be used. - if matches!( - lower_name.as_str(), - "build" - | "check" - | "clean" - | "config" - | "fetch" - | "fix" - | "install" - | "metadata" - | "package" - | "publish" - | "report" - | "root" - | "run" - | "rust" - | "rustc" - | "rustdoc" - | "target" - | "tmp" - | "uninstall" - ) || lower_name.starts_with("cargo") - { - bail!( - "profile name `{}` is reserved\n\ - Please choose a different name.\n\ - {}", - name, - SEE_DOCS - ); - } - - Ok(()) - } - - fn validate_override(&self, which: &str, features: &Features) -> CargoResult<()> { - if self.package.is_some() { - bail!("package-specific profiles cannot be nested"); - } - if self.build_override.is_some() { - bail!("build-override profiles cannot be nested"); - } - if self.panic.is_some() { - bail!("`panic` may not be specified in a `{}` profile", which) - } - if self.lto.is_some() { - bail!("`lto` may not be specified in a `{}` profile", which) - } - if self.rpath.is_some() { - bail!("`rpath` may not be specified in a `{}` profile", which) - } - if self.codegen_backend.is_some() { - features.require(Feature::codegen_backend())?; - } - Ok(()) - } - - /// Overwrite self's values with the given profile. 
- pub fn merge(&mut self, profile: &TomlProfile) { - if let Some(v) = &profile.opt_level { - self.opt_level = Some(v.clone()); - } - - if let Some(v) = &profile.lto { - self.lto = Some(v.clone()); - } - - if let Some(v) = profile.codegen_backend { - self.codegen_backend = Some(v); - } - - if let Some(v) = profile.codegen_units { - self.codegen_units = Some(v); - } - - if let Some(v) = &profile.debug { - self.debug = Some(v.clone()); - } - - if let Some(v) = profile.debug_assertions { - self.debug_assertions = Some(v); - } - - if let Some(v) = &profile.split_debuginfo { - self.split_debuginfo = Some(v.clone()); - } - - if let Some(v) = profile.rpath { - self.rpath = Some(v); - } - - if let Some(v) = &profile.panic { - self.panic = Some(v.clone()); - } - - if let Some(v) = profile.overflow_checks { - self.overflow_checks = Some(v); - } - - if let Some(v) = profile.incremental { - self.incremental = Some(v); - } - - if let Some(other_package) = &profile.package { - match &mut self.package { - Some(self_package) => { - for (spec, other_pkg_profile) in other_package { - match self_package.get_mut(spec) { - Some(p) => p.merge(other_pkg_profile), - None => { - self_package.insert(spec.clone(), other_pkg_profile.clone()); - } - } - } - } - None => self.package = Some(other_package.clone()), - } - } - - if let Some(other_bo) = &profile.build_override { - match &mut self.build_override { - Some(self_bo) => self_bo.merge(other_bo), - None => self.build_override = Some(other_bo.clone()), - } - } - - if let Some(v) = &profile.inherits { - self.inherits = Some(*v); - } - - if let Some(v) = &profile.dir_name { - self.dir_name = Some(*v); - } - - if let Some(v) = &profile.strip { - self.strip = Some(v.clone()); - } - } -} - -/// A StringOrVec can be parsed from either a TOML string or array, -/// but is always stored as a vector. 
-#[derive(Clone, Debug, Serialize, Eq, PartialEq, PartialOrd, Ord)] -pub struct StringOrVec(Vec); - -impl<'de> de::Deserialize<'de> for StringOrVec { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = StringOrVec; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("string or list of strings") - } - - fn visit_str(self, s: &str) -> Result - where - E: de::Error, - { - Ok(StringOrVec(vec![s.to_string()])) - } - - fn visit_seq(self, v: V) -> Result - where - V: de::SeqAccess<'de>, - { - let seq = de::value::SeqAccessDeserializer::new(v); - Vec::deserialize(seq).map(StringOrVec) - } - } - - deserializer.deserialize_any(Visitor) - } -} - -impl StringOrVec { - pub fn iter<'a>(&'a self) -> std::slice::Iter<'a, String> { - self.0.iter() - } -} - -#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] -#[serde(untagged, expecting = "expected a boolean or a string")] -pub enum StringOrBool { - String(String), - Bool(bool), -} - -#[derive(PartialEq, Clone, Debug, Serialize)] -#[serde(untagged)] -pub enum VecStringOrBool { - VecString(Vec), - Bool(bool), -} - -impl<'de> de::Deserialize<'de> for VecStringOrBool { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = VecStringOrBool; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a boolean or vector of strings") - } - - fn visit_seq(self, v: V) -> Result - where - V: de::SeqAccess<'de>, - { - let seq = de::value::SeqAccessDeserializer::new(v); - Vec::deserialize(seq).map(VecStringOrBool::VecString) - } - - fn visit_bool(self, b: bool) -> Result - where - E: de::Error, - { - Ok(VecStringOrBool::Bool(b)) - } - } - - deserializer.deserialize_any(Visitor) - } -} - -fn version_trim_whitespace<'de, D>(deserializer: D) -> Result -where - D: de::Deserializer<'de>, -{ - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = semver::Version; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("SemVer version") - } - - fn visit_str(self, string: &str) -> Result - where - E: de::Error, - { - string.trim().parse().map_err(de::Error::custom) - } - } - - deserializer.deserialize_str(Visitor) -} - -/// Represents the `package`/`project` sections of a `Cargo.toml`. -/// -/// Note that the order of the fields matters, since this is the order they -/// are serialized to a TOML file. For example, you cannot have values after -/// the field `metadata`, since it is a table and values cannot appear after -/// tables. -#[derive(Deserialize, Serialize, Clone, Debug)] -#[serde(rename_all = "kebab-case")] -pub struct TomlProject { - edition: Option, - rust_version: Option, - name: InternedString, - #[serde(deserialize_with = "version_trim_whitespace")] - version: semver::Version, - authors: Option>, - build: Option, - metabuild: Option, - #[serde(rename = "default-target")] - default_target: Option, - #[serde(rename = "forced-target")] - forced_target: Option, - links: Option, - exclude: Option>, - include: Option>, - publish: Option, - workspace: Option, - im_a_teapot: Option, - autobins: Option, - autoexamples: Option, - autotests: Option, - autobenches: Option, - default_run: Option, - - // Package metadata. 
- description: Option, - homepage: Option, - documentation: Option, - readme: Option, - keywords: Option>, - categories: Option>, - license: Option, - license_file: Option, - repository: Option, - resolver: Option, - - // Note that this field must come last due to the way toml serialization - // works which requires tables to be emitted after all values. - metadata: Option, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct TomlWorkspace { - members: Option>, - #[serde(rename = "default-members")] - default_members: Option>, - exclude: Option>, - resolver: Option, - - // Note that this field must come last due to the way toml serialization - // works which requires tables to be emitted after all values. - metadata: Option, -} - -impl TomlProject { - pub fn to_package_id(&self, source_id: SourceId) -> CargoResult { - PackageId::new(self.name, self.version.clone(), source_id) - } -} - -struct Context<'a, 'b> { - deps: &'a mut Vec, - source_id: SourceId, - nested_paths: &'a mut Vec, - config: &'b Config, - warnings: &'a mut Vec, - platform: Option, - root: &'a Path, - features: &'a Features, -} - -impl TomlManifest { - /// Prepares the manifest for publishing. - // - Path and git components of dependency specifications are removed. - // - License path is updated to point within the package. - pub fn prepare_for_publish( - &self, - ws: &Workspace<'_>, - package_root: &Path, - ) -> CargoResult { - let config = ws.config(); - let mut package = self - .package - .as_ref() - .or_else(|| self.project.as_ref()) - .unwrap() - .clone(); - package.workspace = None; - package.resolver = ws.resolve_behavior().to_manifest(); - if let Some(license_file) = &package.license_file { - let license_path = Path::new(&license_file); - let abs_license_path = paths::normalize_path(&package_root.join(license_path)); - if abs_license_path.strip_prefix(package_root).is_err() { - // This path points outside of the package root. `cargo package` - // will copy it into the root, so adjust the path to this location. 
- package.license_file = Some( - license_path - .file_name() - .unwrap() - .to_str() - .unwrap() - .to_string(), - ); - } - } - let all = |_d: &TomlDependency| true; - return Ok(TomlManifest { - package: Some(package), - project: None, - profile: self.profile.clone(), - lib: self.lib.clone(), - bin: self.bin.clone(), - example: self.example.clone(), - test: self.test.clone(), - bench: self.bench.clone(), - dependencies: map_deps(config, self.dependencies.as_ref(), all)?, - dev_dependencies: map_deps( - config, - self.dev_dependencies - .as_ref() - .or_else(|| self.dev_dependencies2.as_ref()), - TomlDependency::is_version_specified, - )?, - dev_dependencies2: None, - build_dependencies: map_deps( - config, - self.build_dependencies - .as_ref() - .or_else(|| self.build_dependencies2.as_ref()), - all, - )?, - build_dependencies2: None, - features: self.features.clone(), - target: match self.target.as_ref().map(|target_map| { - target_map - .iter() - .map(|(k, v)| { - Ok(( - k.clone(), - TomlPlatform { - dependencies: map_deps(config, v.dependencies.as_ref(), all)?, - dev_dependencies: map_deps( - config, - v.dev_dependencies - .as_ref() - .or_else(|| v.dev_dependencies2.as_ref()), - TomlDependency::is_version_specified, - )?, - dev_dependencies2: None, - build_dependencies: map_deps( - config, - v.build_dependencies - .as_ref() - .or_else(|| v.build_dependencies2.as_ref()), - all, - )?, - build_dependencies2: None, - }, - )) - }) - .collect() - }) { - Some(Ok(v)) => Some(v), - Some(Err(e)) => return Err(e), - None => None, - }, - replace: None, - patch: None, - workspace: None, - badges: self.badges.clone(), - cargo_features: self.cargo_features.clone(), - }); - - fn map_deps( - config: &Config, - deps: Option<&BTreeMap>, - filter: impl Fn(&TomlDependency) -> bool, - ) -> CargoResult>> { - let deps = match deps { - Some(deps) => deps, - None => return Ok(None), - }; - let deps = deps - .iter() - .filter(|(_k, v)| filter(v)) - .map(|(k, v)| Ok((k.clone(), map_dependency(config, v)?))) - .collect::>>()?; - Ok(Some(deps)) - } - - fn map_dependency(config: &Config, dep: &TomlDependency) -> CargoResult { - match dep { - TomlDependency::Detailed(d) => { - let mut d = d.clone(); - // Path dependencies become crates.io deps. - d.path.take(); - // Same with git dependencies. - d.git.take(); - d.branch.take(); - d.tag.take(); - d.rev.take(); - // registry specifications are elaborated to the index URL - if let Some(registry) = d.registry.take() { - let src = SourceId::alt_registry(config, ®istry)?; - d.registry_index = Some(src.url().to_string()); - } - Ok(TomlDependency::Detailed(d)) - } - TomlDependency::Simple(s) => Ok(TomlDependency::Detailed(DetailedTomlDependency { - version: Some(s.clone()), - ..Default::default() - })), - } - } - } - - pub fn to_real_manifest( - me: &Rc, - source_id: SourceId, - package_root: &Path, - config: &Config, - ) -> CargoResult<(Manifest, Vec)> { - let mut nested_paths = vec![]; - let mut warnings = vec![]; - let mut errors = vec![]; - - // Parse features first so they will be available when parsing other parts of the TOML. 
- let empty = Vec::new(); - let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); - let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?; - - let project = me.project.as_ref().or_else(|| me.package.as_ref()); - let project = project.ok_or_else(|| anyhow!("no `package` section found"))?; - - let package_name = project.name.trim(); - if package_name.is_empty() { - bail!("package name cannot be an empty string") - } - - validate_package_name(package_name, "package name", "")?; - - let pkgid = project.to_package_id(source_id)?; - - let edition = if let Some(ref edition) = project.edition { - features - .require(Feature::edition()) - .with_context(|| "editions are unstable")?; - edition - .parse() - .with_context(|| "failed to parse the `edition` key")? - } else { - Edition::Edition2015 - }; - if edition == Edition::Edition2021 { - features.require(Feature::edition2021())?; - } else if !edition.is_stable() { - // Guard in case someone forgets to add .require() - return Err(util::errors::internal(format!( - "edition {} should be gated", - edition - ))); - } - - let rust_version = if let Some(rust_version) = &project.rust_version { - let req = match semver::VersionReq::parse(rust_version) { - // Exclude semver operators like `^` and pre-release identifiers - Ok(req) if rust_version.chars().all(|c| c.is_ascii_digit() || c == '.') => req, - _ => bail!("`rust-version` must be a value like \"1.32\""), - }; - if let Some(first_version) = edition.first_version() { - let unsupported = - semver::Version::new(first_version.major, first_version.minor - 1, 9999); - if req.matches(&unsupported) { - bail!( - "rust-version {} is older than first version ({}) required by \ - the specified edition ({})", - rust_version, - first_version, - edition, - ) - } - } - Some(rust_version.clone()) - } else { - None - }; - - if project.metabuild.is_some() { - features.require(Feature::metabuild())?; - } - - if project.resolver.is_some() - || me - .workspace - .as_ref() - .map_or(false, |ws| ws.resolver.is_some()) - { - features.require(Feature::resolver())?; - } - let resolve_behavior = match ( - project.resolver.as_ref(), - me.workspace.as_ref().and_then(|ws| ws.resolver.as_ref()), - ) { - (None, None) => None, - (Some(s), None) | (None, Some(s)) => Some(ResolveBehavior::from_manifest(s)?), - (Some(_), Some(_)) => { - bail!("cannot specify `resolver` field in both `[workspace]` and `[package]`") - } - }; - - // If we have no lib at all, use the inferred lib, if available. - // If we have a lib with a path, we're done. - // If we have a lib with no path, use the inferred lib or else the package name. 
- let targets = targets( - &features, - me, - package_name, - package_root, - edition, - &project.build, - &project.metabuild, - &mut warnings, - &mut errors, - )?; - - if targets.is_empty() { - debug!("manifest has no build targets"); - } - - if let Err(e) = unique_build_targets(&targets, package_root) { - warnings.push(format!( - "file found to be present in multiple \ - build targets: {}", - e - )); - } - - if let Some(links) = &project.links { - if !targets.iter().any(|t| t.is_custom_build()) { - bail!( - "package `{}` specifies that it links to `{}` but does not \ - have a custom build script", - pkgid, - links - ) - } - } - - let mut deps = Vec::new(); - let replace; - let patch; - - { - let mut cx = Context { - deps: &mut deps, - source_id, - nested_paths: &mut nested_paths, - config, - warnings: &mut warnings, - features: &features, - platform: None, - root: package_root, - }; - - fn process_dependencies( - cx: &mut Context<'_, '_>, - new_deps: Option<&BTreeMap>, - kind: Option, - ) -> CargoResult<()> { - let dependencies = match new_deps { - Some(dependencies) => dependencies, - None => return Ok(()), - }; - for (n, v) in dependencies.iter() { - let dep = v.to_dependency(n, cx, kind)?; - validate_package_name(dep.name_in_toml().as_str(), "dependency name", "")?; - cx.deps.push(dep); - } - - Ok(()) - } - - // Collect the dependencies. - process_dependencies(&mut cx, me.dependencies.as_ref(), None)?; - let dev_deps = me - .dev_dependencies - .as_ref() - .or_else(|| me.dev_dependencies2.as_ref()); - process_dependencies(&mut cx, dev_deps, Some(DepKind::Development))?; - let build_deps = me - .build_dependencies - .as_ref() - .or_else(|| me.build_dependencies2.as_ref()); - process_dependencies(&mut cx, build_deps, Some(DepKind::Build))?; - - for (name, platform) in me.target.iter().flatten() { - cx.platform = { - let platform: Platform = name.parse()?; - platform.check_cfg_attributes(cx.warnings); - Some(platform) - }; - process_dependencies(&mut cx, platform.dependencies.as_ref(), None)?; - let build_deps = platform - .build_dependencies - .as_ref() - .or_else(|| platform.build_dependencies2.as_ref()); - process_dependencies(&mut cx, build_deps, Some(DepKind::Build))?; - let dev_deps = platform - .dev_dependencies - .as_ref() - .or_else(|| platform.dev_dependencies2.as_ref()); - process_dependencies(&mut cx, dev_deps, Some(DepKind::Development))?; - } - - replace = me.replace(&mut cx)?; - patch = me.patch(&mut cx)?; - } - - { - let mut names_sources = BTreeMap::new(); - for dep in &deps { - let name = dep.name_in_toml(); - let prev = names_sources.insert(name.to_string(), dep.source_id()); - if prev.is_some() && prev != Some(dep.source_id()) { - bail!( - "Dependency '{}' has different source paths depending on the build \ - target. 
Each dependency must have a single canonical source path \ - irrespective of build target.", - name - ); - } - } - } - - let exclude = project.exclude.clone().unwrap_or_default(); - let include = project.include.clone().unwrap_or_default(); - let empty_features = BTreeMap::new(); - - let summary = Summary::new( - config, - pkgid, - deps, - me.features.as_ref().unwrap_or(&empty_features), - project.links.as_deref(), - )?; - let unstable = config.cli_unstable(); - summary.unstable_gate(unstable.namespaced_features, unstable.weak_dep_features)?; - - let metadata = ManifestMetadata { - description: project.description.clone(), - homepage: project.homepage.clone(), - documentation: project.documentation.clone(), - readme: readme_for_project(package_root, project), - authors: project.authors.clone().unwrap_or_default(), - license: project.license.clone(), - license_file: project.license_file.clone(), - repository: project.repository.clone(), - keywords: project.keywords.clone().unwrap_or_default(), - categories: project.categories.clone().unwrap_or_default(), - badges: me.badges.clone().unwrap_or_default(), - links: project.links.clone(), - }; - - let workspace_config = match (me.workspace.as_ref(), project.workspace.as_ref()) { - (Some(config), None) => WorkspaceConfig::Root(WorkspaceRootConfig::new( - package_root, - &config.members, - &config.default_members, - &config.exclude, - &config.metadata, - )), - (None, root) => WorkspaceConfig::Member { - root: root.cloned(), - }, - (Some(..), Some(..)) => bail!( - "cannot configure both `package.workspace` and \ - `[workspace]`, only one can be specified" - ), - }; - let profiles = me.profile.clone(); - if let Some(profiles) = &profiles { - profiles.validate(&features, &mut warnings)?; - } - let publish = match project.publish { - Some(VecStringOrBool::VecString(ref vecstring)) => Some(vecstring.clone()), - Some(VecStringOrBool::Bool(false)) => Some(vec![]), - None | Some(VecStringOrBool::Bool(true)) => None, - }; - - if summary.features().contains_key("default-features") { - warnings.push( - "`default-features = [\"..\"]` was found in [features]. \ - Did you mean to use `default = [\"..\"]`?" - .to_string(), - ) - } - - if let Some(run) = &project.default_run { - if !targets - .iter() - .filter(|t| t.is_bin()) - .any(|t| t.name() == run) - { - let suggestion = - util::closest_msg(run, targets.iter().filter(|t| t.is_bin()), |t| t.name()); - bail!("default-run target `{}` not found{}", run, suggestion); - } - } - - let default_kind = project - .default_target - .as_ref() - .map(|t| CompileTarget::new(&*t)) - .transpose()? - .map(CompileKind::Target); - let forced_kind = project - .forced_target - .as_ref() - .map(|t| CompileTarget::new(&*t)) - .transpose()? 
- .map(CompileKind::Target); - - let custom_metadata = project.metadata.clone(); - let mut manifest = Manifest::new( - summary, - default_kind, - forced_kind, - targets, - exclude, - include, - project.links.clone(), - metadata, - custom_metadata, - profiles, - publish, - replace, - patch, - workspace_config, - features, - edition, - rust_version, - project.im_a_teapot, - project.default_run.clone(), - Rc::clone(me), - project.metabuild.clone().map(|sov| sov.0), - resolve_behavior, - ); - if project.license_file.is_some() && project.license.is_some() { - manifest.warnings_mut().add_warning( - "only one of `license` or `license-file` is necessary\n\ - `license` should be used if the package license can be expressed \ - with a standard SPDX expression.\n\ - `license-file` should be used if the package uses a non-standard license.\n\ - See https://doc.rust-lang.org/cargo/reference/manifest.html#the-license-and-license-file-fields \ - for more information." - .to_string(), - ); - } - for warning in warnings { - manifest.warnings_mut().add_warning(warning); - } - for error in errors { - manifest.warnings_mut().add_critical_warning(error); - } - - manifest.feature_gate()?; - - Ok((manifest, nested_paths)) - } - - fn to_virtual_manifest( - me: &Rc, - source_id: SourceId, - root: &Path, - config: &Config, - ) -> CargoResult<(VirtualManifest, Vec)> { - if me.project.is_some() { - bail!("this virtual manifest specifies a [project] section, which is not allowed"); - } - if me.package.is_some() { - bail!("this virtual manifest specifies a [package] section, which is not allowed"); - } - if me.lib.is_some() { - bail!("this virtual manifest specifies a [lib] section, which is not allowed"); - } - if me.bin.is_some() { - bail!("this virtual manifest specifies a [[bin]] section, which is not allowed"); - } - if me.example.is_some() { - bail!("this virtual manifest specifies a [[example]] section, which is not allowed"); - } - if me.test.is_some() { - bail!("this virtual manifest specifies a [[test]] section, which is not allowed"); - } - if me.bench.is_some() { - bail!("this virtual manifest specifies a [[bench]] section, which is not allowed"); - } - if me.dependencies.is_some() { - bail!("this virtual manifest specifies a [dependencies] section, which is not allowed"); - } - if me.dev_dependencies.is_some() || me.dev_dependencies2.is_some() { - bail!("this virtual manifest specifies a [dev-dependencies] section, which is not allowed"); - } - if me.build_dependencies.is_some() || me.build_dependencies2.is_some() { - bail!("this virtual manifest specifies a [build-dependencies] section, which is not allowed"); - } - if me.features.is_some() { - bail!("this virtual manifest specifies a [features] section, which is not allowed"); - } - if me.target.is_some() { - bail!("this virtual manifest specifies a [target] section, which is not allowed"); - } - if me.badges.is_some() { - bail!("this virtual manifest specifies a [badges] section, which is not allowed"); - } - - let mut nested_paths = Vec::new(); - let mut warnings = Vec::new(); - let mut deps = Vec::new(); - let empty = Vec::new(); - let cargo_features = me.cargo_features.as_ref().unwrap_or(&empty); - let features = Features::new(cargo_features, config, &mut warnings, source_id.is_path())?; - - let (replace, patch) = { - let mut cx = Context { - deps: &mut deps, - source_id, - nested_paths: &mut nested_paths, - config, - warnings: &mut warnings, - platform: None, - features: &features, - root, - }; - (me.replace(&mut cx)?, me.patch(&mut cx)?) 
- }; - let profiles = me.profile.clone(); - if let Some(profiles) = &profiles { - profiles.validate(&features, &mut warnings)?; - } - if me - .workspace - .as_ref() - .map_or(false, |ws| ws.resolver.is_some()) - { - features.require(Feature::resolver())?; - } - let resolve_behavior = me - .workspace - .as_ref() - .and_then(|ws| ws.resolver.as_deref()) - .map(|r| ResolveBehavior::from_manifest(r)) - .transpose()?; - let workspace_config = match me.workspace { - Some(ref config) => WorkspaceConfig::Root(WorkspaceRootConfig::new( - root, - &config.members, - &config.default_members, - &config.exclude, - &config.metadata, - )), - None => { - bail!("virtual manifests must be configured with [workspace]"); - } - }; - Ok(( - VirtualManifest::new( - replace, - patch, - workspace_config, - profiles, - features, - resolve_behavior, - ), - nested_paths, - )) - } - - fn replace(&self, cx: &mut Context<'_, '_>) -> CargoResult> { - if self.patch.is_some() && self.replace.is_some() { - bail!("cannot specify both [replace] and [patch]"); - } - let mut replace = Vec::new(); - for (spec, replacement) in self.replace.iter().flatten() { - let mut spec = PackageIdSpec::parse(spec).with_context(|| { - format!( - "replacements must specify a valid semver \ - version to replace, but `{}` does not", - spec - ) - })?; - if spec.url().is_none() { - spec.set_url(CRATES_IO_INDEX.parse().unwrap()); - } - - if replacement.is_version_specified() { - bail!( - "replacements cannot specify a version \ - requirement, but found one for `{}`", - spec - ); - } - - let mut dep = replacement.to_dependency(spec.name().as_str(), cx, None)?; - let version = spec.version().ok_or_else(|| { - anyhow!( - "replacements must specify a version \ - to replace, but `{}` does not", - spec - ) - })?; - dep.set_version_req(VersionReq::exact(version)) - .lock_version(version); - replace.push((spec, dep)); - } - Ok(replace) - } - - fn patch(&self, cx: &mut Context<'_, '_>) -> CargoResult>> { - let mut patch = HashMap::new(); - for (url, deps) in self.patch.iter().flatten() { - let url = match &url[..] { - CRATES_IO_REGISTRY => CRATES_IO_INDEX.parse().unwrap(), - _ => cx - .config - .get_registry_index(url) - .or_else(|_| url.into_url()) - .with_context(|| { - format!("[patch] entry `{}` should be a URL or registry name", url) - })?, - }; - patch.insert( - url, - deps.iter() - .map(|(name, dep)| dep.to_dependency(name, cx, None)) - .collect::>>()?, - ); - } - Ok(patch) - } - - /// Returns the path to the build script if one exists for this crate. - fn maybe_custom_build( - &self, - build: &Option, - package_root: &Path, - ) -> Option { - let build_rs = package_root.join("build.rs"); - match *build { - // Explicitly no build script. - Some(StringOrBool::Bool(false)) => None, - Some(StringOrBool::Bool(true)) => Some(build_rs), - Some(StringOrBool::String(ref s)) => Some(PathBuf::from(s)), - None => { - // If there is a `build.rs` file next to the `Cargo.toml`, assume it is - // a build script. - if build_rs.is_file() { - Some(build_rs) - } else { - None - } - } - } - } - - pub fn has_profiles(&self) -> bool { - self.profile.is_some() - } - - pub fn features(&self) -> Option<&BTreeMap>> { - self.features.as_ref() - } -} - -/// Returns the name of the README file for a `TomlProject`. 
-fn readme_for_project(package_root: &Path, project: &TomlProject) -> Option { - match &project.readme { - None => default_readme_from_package_root(package_root), - Some(value) => match value { - StringOrBool::Bool(false) => None, - StringOrBool::Bool(true) => Some("README.md".to_string()), - StringOrBool::String(v) => Some(v.clone()), - }, - } -} - -const DEFAULT_README_FILES: [&str; 3] = ["README.md", "README.txt", "README"]; - -/// Checks if a file with any of the default README file names exists in the package root. -/// If so, returns a `String` representing that name. -fn default_readme_from_package_root(package_root: &Path) -> Option { - for &readme_filename in DEFAULT_README_FILES.iter() { - if package_root.join(readme_filename).is_file() { - return Some(readme_filename.to_string()); - } - } - - None -} - -/// Checks a list of build targets, and ensures the target names are unique within a vector. -/// If not, the name of the offending build target is returned. -fn unique_build_targets(targets: &[Target], package_root: &Path) -> Result<(), String> { - let mut seen = HashSet::new(); - for target in targets { - if let TargetSourcePath::Path(path) = target.src_path() { - let full = package_root.join(path); - if !seen.insert(full.clone()) { - return Err(full.display().to_string()); - } - } - } - Ok(()) -} - -impl TomlDependency

{ - pub(crate) fn to_dependency_split( - &self, - name: &str, - source_id: SourceId, - nested_paths: &mut Vec, - config: &Config, - warnings: &mut Vec, - platform: Option, - root: &Path, - features: &Features, - kind: Option, - ) -> CargoResult { - self.to_dependency( - name, - &mut Context { - deps: &mut Vec::new(), - source_id, - nested_paths, - config, - warnings, - platform, - root, - features, - }, - kind, - ) - } - - fn to_dependency( - &self, - name: &str, - cx: &mut Context<'_, '_>, - kind: Option, - ) -> CargoResult { - match *self { - TomlDependency::Simple(ref version) => DetailedTomlDependency::

{ - version: Some(version.clone()), - ..Default::default() - } - .to_dependency(name, cx, kind), - TomlDependency::Detailed(ref details) => details.to_dependency(name, cx, kind), - } - } - - fn is_version_specified(&self) -> bool { - match self { - TomlDependency::Detailed(d) => d.version.is_some(), - TomlDependency::Simple(..) => true, - } - } -} - -impl DetailedTomlDependency

{ - fn to_dependency( - &self, - name_in_toml: &str, - cx: &mut Context<'_, '_>, - kind: Option, - ) -> CargoResult { - if self.version.is_none() && self.path.is_none() && self.git.is_none() { - let msg = format!( - "dependency ({}) specified without \ - providing a local path, Git repository, or \ - version to use. This will be considered an \ - error in future versions", - name_in_toml - ); - cx.warnings.push(msg); - } - - if let Some(version) = &self.version { - if version.contains('+') { - cx.warnings.push(format!( - "version requirement `{}` for dependency `{}` \ - includes semver metadata which will be ignored, removing the \ - metadata is recommended to avoid confusion", - version, name_in_toml - )); - } - } - - if self.git.is_none() { - let git_only_keys = [ - (&self.branch, "branch"), - (&self.tag, "tag"), - (&self.rev, "rev"), - ]; - - for &(key, key_name) in &git_only_keys { - if key.is_some() { - bail!( - "key `{}` is ignored for dependency ({}).", - key_name, - name_in_toml - ); - } - } - } - - // Early detection of potentially misused feature syntax - // instead of generating a "feature not found" error. - if let Some(features) = &self.features { - for feature in features { - if feature.contains('/') { - bail!( - "feature `{}` in dependency `{}` is not allowed to contain slashes\n\ - If you want to enable features of a transitive dependency, \ - the direct dependency needs to re-export those features from \ - the `[features]` table.", - feature, - name_in_toml - ); - } - if feature.starts_with("dep:") { - bail!( - "feature `{}` in dependency `{}` is not allowed to use explicit \ - `dep:` syntax\n\ - If you want to enable an optional dependency, specify the name \ - of the optional dependency without the `dep:` prefix, or specify \ - a feature from the dependency's `[features]` table that enables \ - the optional dependency.", - feature, - name_in_toml - ); - } - } - } - - let new_source_id = match ( - self.git.as_ref(), - self.path.as_ref(), - self.registry.as_ref(), - self.registry_index.as_ref(), - ) { - (Some(_), _, Some(_), _) | (Some(_), _, _, Some(_)) => bail!( - "dependency ({}) specification is ambiguous. \ - Only one of `git` or `registry` is allowed.", - name_in_toml - ), - (_, _, Some(_), Some(_)) => bail!( - "dependency ({}) specification is ambiguous. \ - Only one of `registry` or `registry-index` is allowed.", - name_in_toml - ), - (Some(git), maybe_path, _, _) => { - if maybe_path.is_some() { - bail!( - "dependency ({}) specification is ambiguous. \ - Only one of `git` or `path` is allowed.", - name_in_toml - ); - } - - let n_details = [&self.branch, &self.tag, &self.rev] - .iter() - .filter(|d| d.is_some()) - .count(); - - if n_details > 1 { - bail!( - "dependency ({}) specification is ambiguous. \ - Only one of `branch`, `tag` or `rev` is allowed.", - name_in_toml - ); - } - - let reference = self - .branch - .clone() - .map(GitReference::Branch) - .or_else(|| self.tag.clone().map(GitReference::Tag)) - .or_else(|| self.rev.clone().map(GitReference::Rev)) - .unwrap_or(GitReference::DefaultBranch); - let loc = git.into_url()?; - - if let Some(fragment) = loc.fragment() { - let msg = format!( - "URL fragment `#{}` in git URL is ignored for dependency ({}). \ - If you were trying to specify a specific git revision, \ - use `rev = \"{}\"` in the dependency declaration.", - fragment, name_in_toml, fragment - ); - cx.warnings.push(msg) - } - - SourceId::for_git(&loc, reference)? 
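The arm above is where the removed `to_dependency` decides which git reference a `git` dependency pins to. As a reduced illustration (not cargo's own types), the precedence it applies — `branch`, then `tag`, then `rev`, falling back to the default branch — looks like the following sketch, with `GitRef` standing in for the deleted `GitReference` enum:

```rust
// Stand-in for the `GitReference` enum used by the removed code; the deleted
// `to_dependency` also rejects manifests that set more than one of these keys.
#[derive(Debug, PartialEq)]
enum GitRef {
    Branch(String),
    Tag(String),
    Rev(String),
    DefaultBranch,
}

// Same precedence as the chain above: branch, then tag, then rev, else default.
fn git_reference(branch: Option<String>, tag: Option<String>, rev: Option<String>) -> GitRef {
    branch
        .map(GitRef::Branch)
        .or_else(|| tag.map(GitRef::Tag))
        .or_else(|| rev.map(GitRef::Rev))
        .unwrap_or(GitRef::DefaultBranch)
}

fn main() {
    // `git = "..."` with no extra keys tracks the repository's default branch.
    assert_eq!(git_reference(None, None, None), GitRef::DefaultBranch);
    // An explicit `branch = "dev"` takes priority over everything else.
    assert_eq!(
        git_reference(Some("dev".into()), None, None),
        GitRef::Branch("dev".into())
    );
}
```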
- } - (None, Some(path), _, _) => { - let path = path.resolve(cx.config); - cx.nested_paths.push(path.clone()); - // If the source ID for the package we're parsing is a path - // source, then we normalize the path here to get rid of - // components like `..`. - // - // The purpose of this is to get a canonical ID for the package - // that we're depending on to ensure that builds of this package - // always end up hashing to the same value no matter where it's - // built from. - if cx.source_id.is_path() { - let path = cx.root.join(path); - let path = paths::normalize_path(&path); - SourceId::for_path(&path)? - } else { - cx.source_id - } - } - (None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?, - (None, None, None, Some(registry_index)) => { - let url = registry_index.into_url()?; - SourceId::for_registry(&url)? - } - (None, None, None, None) => SourceId::crates_io(cx.config)?, - }; - - let (pkg_name, explicit_name_in_toml) = match self.package { - Some(ref s) => (&s[..], Some(name_in_toml)), - None => (name_in_toml, None), - }; - - let version = self.version.as_deref(); - let mut dep = Dependency::parse(pkg_name, version, new_source_id)?; - dep.set_features(self.features.iter().flatten()) - .set_default_features( - self.default_features - .or(self.default_features2) - .unwrap_or(true), - ) - .set_optional(self.optional.unwrap_or(false)) - .set_platform(cx.platform.clone()); - if let Some(registry) = &self.registry { - let registry_id = SourceId::alt_registry(cx.config, registry)?; - dep.set_registry_id(registry_id); - } - if let Some(registry_index) = &self.registry_index { - let url = registry_index.into_url()?; - let registry_id = SourceId::for_registry(&url)?; - dep.set_registry_id(registry_id); - } - - if let Some(kind) = kind { - dep.set_kind(kind); - } - if let Some(name_in_toml) = explicit_name_in_toml { - cx.features.require(Feature::rename_dependency())?; - dep.set_explicit_name_in_toml(name_in_toml); - } - - if let Some(p) = self.public { - cx.features.require(Feature::public_dependency())?; - - if dep.kind() != DepKind::Normal { - bail!("'public' specifier can only be used on regular dependencies, not {:?} dependencies", dep.kind()); - } - - dep.set_public(p); - } - Ok(dep) - } -} - -#[derive(Default, Serialize, Deserialize, Debug, Clone)] -struct TomlTarget { - name: Option, - - // The intention was to only accept `crate-type` here but historical - // versions of Cargo also accepted `crate_type`, so look for both. - #[serde(rename = "crate-type")] - crate_type: Option>, - #[serde(rename = "crate_type")] - crate_type2: Option>, - - path: Option, - // Note that `filename` is used for the cargo-feature `different_binary_name` - filename: Option, - test: Option, - doctest: Option, - bench: Option, - doc: Option, - plugin: Option, - #[serde(rename = "proc-macro")] - proc_macro_raw: Option, - #[serde(rename = "proc_macro")] - proc_macro_raw2: Option, - harness: Option, - #[serde(rename = "required-features")] - required_features: Option>, - edition: Option, -} - -#[derive(Clone)] -struct PathValue(PathBuf); - -impl<'de> de::Deserialize<'de> for PathValue { - fn deserialize(deserializer: D) -> Result - where - D: de::Deserializer<'de>, - { - Ok(PathValue(String::deserialize(deserializer)?.into())) - } -} - -impl ser::Serialize for PathValue { - fn serialize(&self, serializer: S) -> Result - where - S: ser::Serializer, - { - self.0.serialize(serializer) - } -} - -/// Corresponds to a `target` entry, but `TomlTarget` is already used. 
-#[derive(Serialize, Deserialize, Debug)] -struct TomlPlatform { - dependencies: Option>, - #[serde(rename = "build-dependencies")] - build_dependencies: Option>, - #[serde(rename = "build_dependencies")] - build_dependencies2: Option>, - #[serde(rename = "dev-dependencies")] - dev_dependencies: Option>, - #[serde(rename = "dev_dependencies")] - dev_dependencies2: Option>, -} - -impl TomlTarget { - fn new() -> TomlTarget { - TomlTarget::default() - } - - fn name(&self) -> String { - match self.name { - Some(ref name) => name.clone(), - None => panic!("target name is required"), - } - } - - fn proc_macro(&self) -> Option { - self.proc_macro_raw.or(self.proc_macro_raw2).or_else(|| { - if let Some(types) = self.crate_types() { - if types.contains(&"proc-macro".to_string()) { - return Some(true); - } - } - None - }) - } - - fn crate_types(&self) -> Option<&Vec> { - self.crate_type - .as_ref() - .or_else(|| self.crate_type2.as_ref()) - } -} - -impl fmt::Debug for PathValue { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/toml/targets.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/toml/targets.rs deleted file mode 100644 index 473994b28..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/toml/targets.rs +++ /dev/null @@ -1,962 +0,0 @@ -//! This module implements Cargo conventions for directory layout: -//! -//! * `src/lib.rs` is a library -//! * `src/main.rs` is a binary -//! * `src/bin/*.rs` are binaries -//! * `examples/*.rs` are examples -//! * `tests/*.rs` are integration tests -//! * `benches/*.rs` are benchmarks -//! -//! It is a bit tricky because we need match explicit information from `Cargo.toml` -//! with implicit info in directory layout. - -use std::collections::HashSet; -use std::fs::{self, DirEntry}; -use std::path::{Path, PathBuf}; - -use super::{ - PathValue, StringOrBool, StringOrVec, TomlBenchTarget, TomlBinTarget, TomlExampleTarget, - TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget, -}; -use crate::core::compiler::CrateType; -use crate::core::{Edition, Feature, Features, Target}; -use crate::util::errors::CargoResult; -use crate::util::restricted_names; - -use anyhow::Context as _; - -const DEFAULT_TEST_DIR_NAME: &'static str = "tests"; -const DEFAULT_BENCH_DIR_NAME: &'static str = "benches"; -const DEFAULT_EXAMPLE_DIR_NAME: &'static str = "examples"; -const DEFAULT_BIN_DIR_NAME: &'static str = "bin"; - -pub fn targets( - features: &Features, - manifest: &TomlManifest, - package_name: &str, - package_root: &Path, - edition: Edition, - custom_build: &Option, - metabuild: &Option, - warnings: &mut Vec, - errors: &mut Vec, -) -> CargoResult> { - let mut targets = Vec::new(); - - let has_lib; - - if let Some(target) = clean_lib( - features, - manifest.lib.as_ref(), - package_root, - package_name, - edition, - warnings, - )? 
{ - targets.push(target); - has_lib = true; - } else { - has_lib = false; - } - - let package = manifest - .package - .as_ref() - .or_else(|| manifest.project.as_ref()) - .ok_or_else(|| anyhow::format_err!("manifest has no `package` (or `project`)"))?; - - targets.extend(clean_bins( - features, - manifest.bin.as_ref(), - package_root, - package_name, - edition, - package.autobins, - warnings, - errors, - has_lib, - )?); - - targets.extend(clean_examples( - features, - manifest.example.as_ref(), - package_root, - edition, - package.autoexamples, - warnings, - errors, - )?); - - targets.extend(clean_tests( - features, - manifest.test.as_ref(), - package_root, - edition, - package.autotests, - warnings, - errors, - )?); - - targets.extend(clean_benches( - features, - manifest.bench.as_ref(), - package_root, - edition, - package.autobenches, - warnings, - errors, - )?); - - // processing the custom build script - if let Some(custom_build) = manifest.maybe_custom_build(custom_build, package_root) { - if metabuild.is_some() { - anyhow::bail!("cannot specify both `metabuild` and `build`"); - } - let name = format!( - "build-script-{}", - custom_build - .file_stem() - .and_then(|s| s.to_str()) - .unwrap_or("") - ); - targets.push(Target::custom_build_target( - &name, - package_root.join(custom_build), - edition, - )); - } - if let Some(metabuild) = metabuild { - // Verify names match available build deps. - let bdeps = manifest.build_dependencies.as_ref(); - for name in &metabuild.0 { - if !bdeps.map_or(false, |bd| bd.contains_key(name)) { - anyhow::bail!( - "metabuild package `{}` must be specified in `build-dependencies`", - name - ); - } - } - - targets.push(Target::metabuild_target(&format!( - "metabuild-{}", - package.name - ))); - } - - Ok(targets) -} - -fn clean_lib( - features: &Features, - toml_lib: Option<&TomlLibTarget>, - package_root: &Path, - package_name: &str, - edition: Edition, - warnings: &mut Vec, -) -> CargoResult> { - let inferred = inferred_lib(package_root); - let lib = match toml_lib { - Some(lib) => { - if let Some(ref name) = lib.name { - // XXX: other code paths dodge this validation - if name.contains('-') { - anyhow::bail!("library target names cannot contain hyphens: {}", name) - } - } - Some(TomlTarget { - name: lib.name.clone().or_else(|| Some(package_name.to_owned())), - ..lib.clone() - }) - } - None => inferred.as_ref().map(|lib| TomlTarget { - name: Some(package_name.to_string()), - path: Some(PathValue(lib.clone())), - ..TomlTarget::new() - }), - }; - - let lib = match lib { - Some(ref lib) => lib, - None => return Ok(None), - }; - - validate_target_name(lib, "library", "lib", warnings)?; - - let path = match (lib.path.as_ref(), inferred) { - (Some(path), _) => package_root.join(&path.0), - (None, Some(path)) => path, - (None, None) => { - let legacy_path = package_root.join("src").join(format!("{}.rs", lib.name())); - if edition == Edition::Edition2015 && legacy_path.exists() { - warnings.push(format!( - "path `{}` was erroneously implicitly accepted for library `{}`,\n\ - please rename the file to `src/lib.rs` or set lib.path in Cargo.toml", - legacy_path.display(), - lib.name() - )); - legacy_path - } else { - anyhow::bail!( - "can't find library `{}`, \ - rename file to `src/lib.rs` or specify lib.path", - lib.name() - ) - } - } - }; - - // Per the Macros 1.1 RFC: - // - // > Initially if a crate is compiled with the `proc-macro` crate type - // > (and possibly others) it will forbid exporting any items in the - // > crate other than those functions tagged 
#[proc_macro_derive] and - // > those functions must also be placed at the crate root. - // - // A plugin requires exporting plugin_registrar so a crate cannot be - // both at once. - let crate_types = match (lib.crate_types(), lib.plugin, lib.proc_macro()) { - (Some(kinds), _, _) if kinds.contains(&"proc-macro".to_string()) => { - if let Some(true) = lib.plugin { - // This is a warning to retain backwards compatibility. - warnings.push(format!( - "proc-macro library `{}` should not specify `plugin = true`", - lib.name() - )); - } - warnings.push(format!( - "library `{}` should only specify `proc-macro = true` instead of setting `crate-type`", - lib.name() - )); - if kinds.len() > 1 { - anyhow::bail!("cannot mix `proc-macro` crate type with others"); - } - vec![CrateType::ProcMacro] - } - (_, Some(true), Some(true)) => { - anyhow::bail!("`lib.plugin` and `lib.proc-macro` cannot both be `true`") - } - (Some(kinds), _, _) => kinds.iter().map(|s| s.into()).collect(), - (None, Some(true), _) => vec![CrateType::Dylib], - (None, _, Some(true)) => vec![CrateType::ProcMacro], - (None, _, _) => vec![CrateType::Lib], - }; - - let mut target = Target::lib_target(&lib.name(), crate_types, path, edition); - configure(features, lib, &mut target)?; - Ok(Some(target)) -} - -fn clean_bins( - features: &Features, - toml_bins: Option<&Vec>, - package_root: &Path, - package_name: &str, - edition: Edition, - autodiscover: Option, - warnings: &mut Vec, - errors: &mut Vec, - has_lib: bool, -) -> CargoResult> { - let inferred = inferred_bins(package_root, package_name); - - let bins = toml_targets_and_inferred( - toml_bins, - &inferred, - package_root, - autodiscover, - edition, - warnings, - "binary", - "bin", - "autobins", - ); - - // This loop performs basic checks on each of the TomlTarget in `bins`. - for bin in &bins { - // For each binary, check if the `filename` parameter is populated. If it is, - // check if the corresponding cargo feature has been activated. 
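Stepping back to the crate-type selection in `clean_lib` above: reduced to its core precedence, and leaving out the proc-macro/plugin conflict warnings and errors the deleted code reports, it behaves roughly like this sketch, which uses plain strings rather than cargo's `CrateType`:

```rust
// Core of the precedence only; the removed `clean_lib` additionally special-cases
// an explicit `crate-type = ["proc-macro"]` and rejects `plugin` + `proc-macro`.
fn lib_crate_types(
    explicit: Option<Vec<String>>,
    plugin: Option<bool>,
    proc_macro: Option<bool>,
) -> Vec<String> {
    match (explicit, plugin, proc_macro) {
        // An explicit `crate-type` list is taken as-is.
        (Some(kinds), _, _) => kinds,
        // `plugin = true` historically implied a dylib.
        (None, Some(true), _) => vec!["dylib".to_string()],
        // `proc-macro = true` forces the proc-macro crate type.
        (None, _, Some(true)) => vec!["proc-macro".to_string()],
        // Otherwise it is an ordinary lib target.
        (None, _, _) => vec!["lib".to_string()],
    }
}

fn main() {
    assert_eq!(lib_crate_types(None, None, Some(true)), vec!["proc-macro"]);
    assert_eq!(lib_crate_types(None, Some(true), None), vec!["dylib"]);
    assert_eq!(lib_crate_types(None, None, None), vec!["lib"]);
}
```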
- if bin.filename.is_some() { - features.require(Feature::different_binary_name())?; - } - - validate_target_name(bin, "binary", "bin", warnings)?; - - let name = bin.name(); - - if let Some(crate_types) = bin.crate_types() { - if !crate_types.is_empty() { - errors.push(format!( - "the target `{}` is a binary and can't have any \ - crate-types set (currently \"{}\")", - name, - crate_types.join(", ") - )); - } - } - - if bin.proc_macro() == Some(true) { - errors.push(format!( - "the target `{}` is a binary and can't have `proc-macro` \ - set `true`", - name - )); - } - - if restricted_names::is_conflicting_artifact_name(&name) { - anyhow::bail!( - "the binary target name `{}` is forbidden, \ - it conflicts with with cargo's build directory names", - name - ) - } - } - - validate_unique_names(&bins, "binary")?; - - let mut result = Vec::new(); - for bin in &bins { - let path = target_path(bin, &inferred, "bin", package_root, edition, &mut |_| { - if let Some(legacy_path) = legacy_bin_path(package_root, &bin.name(), has_lib) { - warnings.push(format!( - "path `{}` was erroneously implicitly accepted for binary `{}`,\n\ - please set bin.path in Cargo.toml", - legacy_path.display(), - bin.name() - )); - Some(legacy_path) - } else { - None - } - }); - let path = match path { - Ok(path) => path, - Err(e) => anyhow::bail!("{}", e), - }; - - let mut target = Target::bin_target( - &bin.name(), - bin.filename.clone(), - path, - bin.required_features.clone(), - edition, - ); - - configure(features, bin, &mut target)?; - result.push(target); - } - return Ok(result); - - fn legacy_bin_path(package_root: &Path, name: &str, has_lib: bool) -> Option { - if !has_lib { - let path = package_root.join("src").join(format!("{}.rs", name)); - if path.exists() { - return Some(path); - } - } - let path = package_root.join("src").join("main.rs"); - if path.exists() { - return Some(path); - } - - let path = package_root - .join("src") - .join(DEFAULT_BIN_DIR_NAME) - .join("main.rs"); - if path.exists() { - return Some(path); - } - None - } -} - -fn clean_examples( - features: &Features, - toml_examples: Option<&Vec>, - package_root: &Path, - edition: Edition, - autodiscover: Option, - warnings: &mut Vec, - errors: &mut Vec, -) -> CargoResult> { - let inferred = infer_from_directory(&package_root.join(DEFAULT_EXAMPLE_DIR_NAME)); - - let targets = clean_targets( - "example", - "example", - toml_examples, - &inferred, - package_root, - edition, - autodiscover, - warnings, - errors, - "autoexamples", - )?; - - let mut result = Vec::new(); - for (path, toml) in targets { - let crate_types = match toml.crate_types() { - Some(kinds) => kinds.iter().map(|s| s.into()).collect(), - None => Vec::new(), - }; - - let mut target = Target::example_target( - &toml.name(), - crate_types, - path, - toml.required_features.clone(), - edition, - ); - configure(features, &toml, &mut target)?; - result.push(target); - } - - Ok(result) -} - -fn clean_tests( - features: &Features, - toml_tests: Option<&Vec>, - package_root: &Path, - edition: Edition, - autodiscover: Option, - warnings: &mut Vec, - errors: &mut Vec, -) -> CargoResult> { - let inferred = infer_from_directory(&package_root.join(DEFAULT_TEST_DIR_NAME)); - - let targets = clean_targets( - "test", - "test", - toml_tests, - &inferred, - package_root, - edition, - autodiscover, - warnings, - errors, - "autotests", - )?; - - let mut result = Vec::new(); - for (path, toml) in targets { - let mut target = - Target::test_target(&toml.name(), path, toml.required_features.clone(), 
edition); - configure(features, &toml, &mut target)?; - result.push(target); - } - Ok(result) -} - -fn clean_benches( - features: &Features, - toml_benches: Option<&Vec>, - package_root: &Path, - edition: Edition, - autodiscover: Option, - warnings: &mut Vec, - errors: &mut Vec, -) -> CargoResult> { - let mut legacy_warnings = vec![]; - - let targets = { - let mut legacy_bench_path = |bench: &TomlTarget| { - let legacy_path = package_root.join("src").join("bench.rs"); - if !(bench.name() == "bench" && legacy_path.exists()) { - return None; - } - legacy_warnings.push(format!( - "path `{}` was erroneously implicitly accepted for benchmark `{}`,\n\ - please set bench.path in Cargo.toml", - legacy_path.display(), - bench.name() - )); - Some(legacy_path) - }; - - let inferred = infer_from_directory(&package_root.join("benches")); - - clean_targets_with_legacy_path( - "benchmark", - "bench", - toml_benches, - &inferred, - package_root, - edition, - autodiscover, - warnings, - errors, - &mut legacy_bench_path, - "autobenches", - )? - }; - - warnings.append(&mut legacy_warnings); - - let mut result = Vec::new(); - for (path, toml) in targets { - let mut target = - Target::bench_target(&toml.name(), path, toml.required_features.clone(), edition); - configure(features, &toml, &mut target)?; - result.push(target); - } - - Ok(result) -} - -fn clean_targets( - target_kind_human: &str, - target_kind: &str, - toml_targets: Option<&Vec>, - inferred: &[(String, PathBuf)], - package_root: &Path, - edition: Edition, - autodiscover: Option, - warnings: &mut Vec, - errors: &mut Vec, - autodiscover_flag_name: &str, -) -> CargoResult> { - clean_targets_with_legacy_path( - target_kind_human, - target_kind, - toml_targets, - inferred, - package_root, - edition, - autodiscover, - warnings, - errors, - &mut |_| None, - autodiscover_flag_name, - ) -} - -fn clean_targets_with_legacy_path( - target_kind_human: &str, - target_kind: &str, - toml_targets: Option<&Vec>, - inferred: &[(String, PathBuf)], - package_root: &Path, - edition: Edition, - autodiscover: Option, - warnings: &mut Vec, - errors: &mut Vec, - legacy_path: &mut dyn FnMut(&TomlTarget) -> Option, - autodiscover_flag_name: &str, -) -> CargoResult> { - let toml_targets = toml_targets_and_inferred( - toml_targets, - inferred, - package_root, - autodiscover, - edition, - warnings, - target_kind_human, - target_kind, - autodiscover_flag_name, - ); - - for target in &toml_targets { - validate_target_name(target, target_kind_human, target_kind, warnings)?; - } - - validate_unique_names(&toml_targets, target_kind)?; - let mut result = Vec::new(); - for target in toml_targets { - let path = target_path( - &target, - inferred, - target_kind, - package_root, - edition, - legacy_path, - ); - let path = match path { - Ok(path) => path, - Err(e) => { - errors.push(e); - continue; - } - }; - result.push((path, target)); - } - Ok(result) -} - -fn inferred_lib(package_root: &Path) -> Option { - let lib = package_root.join("src").join("lib.rs"); - if lib.exists() { - Some(lib) - } else { - None - } -} - -fn inferred_bins(package_root: &Path, package_name: &str) -> Vec<(String, PathBuf)> { - let main = package_root.join("src").join("main.rs"); - let mut result = Vec::new(); - if main.exists() { - result.push((package_name.to_string(), main)); - } - result.extend(infer_from_directory( - &package_root.join("src").join(DEFAULT_BIN_DIR_NAME), - )); - - result -} - -fn infer_from_directory(directory: &Path) -> Vec<(String, PathBuf)> { - let entries = match fs::read_dir(directory) 
{ - Err(_) => return Vec::new(), - Ok(dir) => dir, - }; - - entries - .filter_map(|e| e.ok()) - .filter(is_not_dotfile) - .filter_map(|d| infer_any(&d)) - .collect() -} - -fn infer_any(entry: &DirEntry) -> Option<(String, PathBuf)> { - if entry.path().extension().and_then(|p| p.to_str()) == Some("rs") { - infer_file(entry) - } else if entry.file_type().map(|t| t.is_dir()).ok() == Some(true) { - infer_subdirectory(entry) - } else { - None - } -} - -fn infer_file(entry: &DirEntry) -> Option<(String, PathBuf)> { - let path = entry.path(); - path.file_stem() - .and_then(|p| p.to_str()) - .map(|p| (p.to_owned(), path.clone())) -} - -fn infer_subdirectory(entry: &DirEntry) -> Option<(String, PathBuf)> { - let path = entry.path(); - let main = path.join("main.rs"); - let name = path.file_name().and_then(|n| n.to_str()); - match (name, main.exists()) { - (Some(name), true) => Some((name.to_owned(), main)), - _ => None, - } -} - -fn is_not_dotfile(entry: &DirEntry) -> bool { - entry.file_name().to_str().map(|s| s.starts_with('.')) == Some(false) -} - -fn toml_targets_and_inferred( - toml_targets: Option<&Vec>, - inferred: &[(String, PathBuf)], - package_root: &Path, - autodiscover: Option, - edition: Edition, - warnings: &mut Vec, - target_kind_human: &str, - target_kind: &str, - autodiscover_flag_name: &str, -) -> Vec { - let inferred_targets = inferred_to_toml_targets(inferred); - match toml_targets { - None => { - if let Some(false) = autodiscover { - vec![] - } else { - inferred_targets - } - } - Some(targets) => { - let mut targets = targets.clone(); - - let target_path = - |target: &TomlTarget| target.path.clone().map(|p| package_root.join(p.0)); - - let mut seen_names = HashSet::new(); - let mut seen_paths = HashSet::new(); - for target in targets.iter() { - seen_names.insert(target.name.clone()); - seen_paths.insert(target_path(target)); - } - - let mut rem_targets = vec![]; - for target in inferred_targets { - if !seen_names.contains(&target.name) && !seen_paths.contains(&target_path(&target)) - { - rem_targets.push(target); - } - } - - let autodiscover = match autodiscover { - Some(autodiscover) => autodiscover, - None => { - if edition == Edition::Edition2015 { - if !rem_targets.is_empty() { - let mut rem_targets_str = String::new(); - for t in rem_targets.iter() { - if let Some(p) = t.path.clone() { - rem_targets_str.push_str(&format!("* {}\n", p.0.display())) - } - } - warnings.push(format!( - "\ -An explicit [[{section}]] section is specified in Cargo.toml which currently -disables Cargo from automatically inferring other {target_kind_human} targets. -This inference behavior will change in the Rust 2018 edition and the following -files will be included as a {target_kind_human} target: - -{rem_targets_str} -This is likely to break cargo build or cargo test as these files may not be -ready to be compiled as a {target_kind_human} target today. You can future-proof yourself -and disable this warning by adding `{autodiscover_flag_name} = false` to your [package] -section. You may also move the files to a location where Cargo would not -automatically infer them to be a target, such as in subfolders. 
- -For more information on this warning you can consult -https://github.com/rust-lang/cargo/issues/5330", - section = target_kind, - target_kind_human = target_kind_human, - rem_targets_str = rem_targets_str, - autodiscover_flag_name = autodiscover_flag_name, - )); - }; - false - } else { - true - } - } - }; - - if autodiscover { - targets.append(&mut rem_targets); - } - - targets - } - } -} - -fn inferred_to_toml_targets(inferred: &[(String, PathBuf)]) -> Vec { - inferred - .iter() - .map(|&(ref name, ref path)| TomlTarget { - name: Some(name.clone()), - path: Some(PathValue(path.clone())), - ..TomlTarget::new() - }) - .collect() -} - -fn validate_target_name( - target: &TomlTarget, - target_kind_human: &str, - target_kind: &str, - warnings: &mut Vec, -) -> CargoResult<()> { - match target.name { - Some(ref name) => { - if name.trim().is_empty() { - anyhow::bail!("{} target names cannot be empty", target_kind_human) - } - if cfg!(windows) && restricted_names::is_windows_reserved(name) { - warnings.push(format!( - "{} target `{}` is a reserved Windows filename, \ - this target will not work on Windows platforms", - target_kind_human, name - )); - } - } - None => anyhow::bail!( - "{} target {}.name is required", - target_kind_human, - target_kind - ), - } - - Ok(()) -} - -/// Will check a list of toml targets, and make sure the target names are unique within a vector. -fn validate_unique_names(targets: &[TomlTarget], target_kind: &str) -> CargoResult<()> { - let mut seen = HashSet::new(); - for name in targets.iter().map(|e| e.name()) { - if !seen.insert(name.clone()) { - anyhow::bail!( - "found duplicate {target_kind} name {name}, \ - but all {target_kind} targets must have a unique name", - target_kind = target_kind, - name = name - ); - } - } - Ok(()) -} - -fn configure(features: &Features, toml: &TomlTarget, target: &mut Target) -> CargoResult<()> { - let t2 = target.clone(); - target - .set_tested(toml.test.unwrap_or_else(|| t2.tested())) - .set_doc(toml.doc.unwrap_or_else(|| t2.documented())) - .set_doctest(toml.doctest.unwrap_or_else(|| t2.doctested())) - .set_benched(toml.bench.unwrap_or_else(|| t2.benched())) - .set_harness(toml.harness.unwrap_or_else(|| t2.harness())) - .set_proc_macro(toml.proc_macro().unwrap_or_else(|| t2.proc_macro())) - .set_for_host(match (toml.plugin, toml.proc_macro()) { - (None, None) => t2.for_host(), - (Some(true), _) | (_, Some(true)) => true, - (Some(false), _) | (_, Some(false)) => false, - }); - if let Some(edition) = toml.edition.clone() { - features - .require(Feature::edition()) - .with_context(|| "editions are unstable")?; - target.set_edition( - edition - .parse() - .with_context(|| "failed to parse the `edition` key")?, - ); - } - Ok(()) -} - -/// Build an error message for a target path that cannot be determined either -/// by auto-discovery or specifiying. -/// -/// This function tries to detect commonly wrong paths for targets: -/// -/// test -> tests/*.rs, tests/*/main.rs -/// bench -> benches/*.rs, benches/*/main.rs -/// example -> examples/*.rs, examples/*/main.rs -/// bin -> src/bin/*.rs, src/bin/*/main.rs -/// -/// Note that the logic need to sync with [`infer_from_directory`] if changes. 
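The doc comment above lists the default and commonly mistaken locations that the error message checks. A minimal sketch of the two candidate shapes it probes — `<dir>/<name>.rs` and `<dir>/<name>/main.rs` — using only the standard library and an arbitrary target name:

```rust
use std::path::{Path, PathBuf};

// The two layouts the removed helper probes for a target `name` under a
// directory such as `tests`, `benches`, `examples`, or `src/bin`.
fn candidate_paths(dir: &str, name: &str) -> [PathBuf; 2] {
    let base = Path::new(dir).join(name);
    [base.with_extension("rs"), base.join("main.rs")]
}

fn main() {
    let [file, subdir] = candidate_paths("tests", "smoke");
    assert_eq!(file, Path::new("tests").join("smoke.rs"));
    assert_eq!(subdir, Path::new("tests").join("smoke").join("main.rs"));
}
```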
-fn target_path_not_found_error_message( - package_root: &Path, - target: &TomlTarget, - target_kind: &str, -) -> String { - fn possible_target_paths(name: &str, kind: &str, commonly_wrong: bool) -> [PathBuf; 2] { - let mut target_path = PathBuf::new(); - match (kind, commonly_wrong) { - // commonly wrong paths - ("test" | "bench" | "example", true) => target_path.push(kind), - ("bin", true) => { - target_path.push("src"); - target_path.push("bins"); - } - // default inferred paths - ("test", false) => target_path.push(DEFAULT_TEST_DIR_NAME), - ("bench", false) => target_path.push(DEFAULT_BENCH_DIR_NAME), - ("example", false) => target_path.push(DEFAULT_EXAMPLE_DIR_NAME), - ("bin", false) => { - target_path.push("src"); - target_path.push(DEFAULT_BIN_DIR_NAME); - } - _ => unreachable!("invalid target kind: {}", kind), - } - target_path.push(name); - - let target_path_file = { - let mut path = target_path.clone(); - path.set_extension("rs"); - path - }; - let target_path_subdir = { - target_path.push("main.rs"); - target_path - }; - return [target_path_file, target_path_subdir]; - } - - let target_name = target.name(); - let commonly_wrong_paths = possible_target_paths(&target_name, target_kind, true); - let possible_paths = possible_target_paths(&target_name, target_kind, false); - let existing_wrong_path_index = match ( - package_root.join(&commonly_wrong_paths[0]).exists(), - package_root.join(&commonly_wrong_paths[1]).exists(), - ) { - (true, _) => Some(0), - (_, true) => Some(1), - _ => None, - }; - - if let Some(i) = existing_wrong_path_index { - return format!( - "\ -can't find `{name}` {kind} at default paths, but found a file at `{wrong_path}`. -Perhaps rename the file to `{possible_path}` for target auto-discovery, \ -or specify {kind}.path if you want to use a non-default path.", - name = target_name, - kind = target_kind, - wrong_path = commonly_wrong_paths[i].display(), - possible_path = possible_paths[i].display(), - ); - } - - format!( - "can't find `{name}` {kind} at `{path_file}` or `{path_dir}`. \ - Please specify {kind}.path if you want to use a non-default path.", - name = target_name, - kind = target_kind, - path_file = possible_paths[0].display(), - path_dir = possible_paths[1].display(), - ) -} - -fn target_path( - target: &TomlTarget, - inferred: &[(String, PathBuf)], - target_kind: &str, - package_root: &Path, - edition: Edition, - legacy_path: &mut dyn FnMut(&TomlTarget) -> Option, -) -> Result { - if let Some(ref path) = target.path { - // Should we verify that this path exists here? 
- return Ok(package_root.join(&path.0)); - } - let name = target.name(); - - let mut matching = inferred - .iter() - .filter(|&&(ref n, _)| n == &name) - .map(|&(_, ref p)| p.clone()); - - let first = matching.next(); - let second = matching.next(); - match (first, second) { - (Some(path), None) => Ok(path), - (None, None) => { - if edition == Edition::Edition2015 { - if let Some(path) = legacy_path(target) { - return Ok(path); - } - } - Err(target_path_not_found_error_message( - package_root, - target, - target_kind, - )) - } - (Some(p0), Some(p1)) => { - if edition == Edition::Edition2015 { - if let Some(path) = legacy_path(target) { - return Ok(path); - } - } - Err(format!( - "\ -cannot infer path for `{}` {} -Cargo doesn't know which to use because multiple target files found at `{}` and `{}`.", - target.name(), - target_kind, - p0.strip_prefix(package_root).unwrap_or(&p0).display(), - p1.strip_prefix(package_root).unwrap_or(&p1).display(), - )) - } - (None, Some(_)) => unreachable!(), - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/vcs.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/vcs.rs deleted file mode 100644 index 1477510e2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/vcs.rs +++ /dev/null @@ -1,100 +0,0 @@ -use crate::util::CargoResult; -use cargo_util::paths; -use cargo_util::ProcessBuilder; -use std::path::Path; - -// Check if we are in an existing repo. We define that to be true if either: -// -// 1. We are in a git repo and the path to the new package is not an ignored -// path in that repo. -// 2. We are in an HG repo. -pub fn existing_vcs_repo(path: &Path, cwd: &Path) -> bool { - fn in_git_repo(path: &Path, cwd: &Path) -> bool { - if let Ok(repo) = GitRepo::discover(path, cwd) { - // Don't check if the working directory itself is ignored. 
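The comment above spells out when `cargo new`/`cargo init` treats a path as already being under version control. Stripped of the git2 and hg probes, the decision reduces to a single boolean, shown here with stand-in flags for the checks the deleted file performs:

```rust
// `is_repo_root` models the "workdir == path" early-out in the removed code:
// the root of a git repository counts even if an ignore rule matches it.
fn already_in_vcs(
    in_git_repo: bool,
    is_repo_root: bool,
    git_ignores_path: bool,
    in_hg_repo: bool,
) -> bool {
    let counted_by_git = in_git_repo && (is_repo_root || !git_ignores_path);
    counted_by_git || in_hg_repo
}

fn main() {
    // Inside a git repo, but the new package path is git-ignored: not "existing
    // VCS", so a fresh repository would still be initialized there.
    assert!(!already_in_vcs(true, false, true, false));
    // A Mercurial checkout alone is enough.
    assert!(already_in_vcs(false, false, false, true));
}
```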
- if repo.workdir().map_or(false, |workdir| workdir == path) { - true - } else { - !repo.is_path_ignored(path).unwrap_or(false) - } - } else { - false - } - } - - in_git_repo(path, cwd) || HgRepo::discover(path, cwd).is_ok() -} - -pub struct HgRepo; -pub struct GitRepo; -pub struct PijulRepo; -pub struct FossilRepo; - -impl GitRepo { - pub fn init(path: &Path, _: &Path) -> CargoResult { - git2::Repository::init(path)?; - Ok(GitRepo) - } - pub fn discover(path: &Path, _: &Path) -> Result { - git2::Repository::discover(path) - } -} - -impl HgRepo { - pub fn init(path: &Path, cwd: &Path) -> CargoResult { - ProcessBuilder::new("hg") - .cwd(cwd) - .arg("init") - .arg(path) - .exec()?; - Ok(HgRepo) - } - pub fn discover(path: &Path, cwd: &Path) -> CargoResult { - ProcessBuilder::new("hg") - .cwd(cwd) - .arg("--cwd") - .arg(path) - .arg("root") - .exec_with_output()?; - Ok(HgRepo) - } -} - -impl PijulRepo { - pub fn init(path: &Path, cwd: &Path) -> CargoResult { - ProcessBuilder::new("pijul") - .cwd(cwd) - .arg("init") - .arg(path) - .exec()?; - Ok(PijulRepo) - } -} - -impl FossilRepo { - pub fn init(path: &Path, cwd: &Path) -> CargoResult { - // fossil doesn't create the directory so we'll do that first - paths::create_dir_all(path)?; - - // set up the paths we'll use - let db_fname = ".fossil"; - let mut db_path = path.to_owned(); - db_path.push(db_fname); - - // then create the fossil DB in that location - ProcessBuilder::new("fossil") - .cwd(cwd) - .arg("init") - .arg(&db_path) - .exec()?; - - // open it in that new directory - ProcessBuilder::new("fossil") - .cwd(&path) - .arg("open") - .arg(db_fname) - .exec()?; - - Ok(FossilRepo) - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/workspace.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/workspace.rs deleted file mode 100644 index e8317f101..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/util/workspace.rs +++ /dev/null @@ -1,130 +0,0 @@ -use crate::core::compiler::Unit; -use crate::core::manifest::TargetSourcePath; -use crate::core::{Target, Workspace}; -use crate::ops::CompileOptions; -use crate::util::CargoResult; -use anyhow::bail; -use cargo_util::ProcessBuilder; -use std::fmt::Write; -use std::path::PathBuf; - -fn get_available_targets<'a>( - filter_fn: fn(&Target) -> bool, - ws: &'a Workspace<'_>, - options: &'a CompileOptions, -) -> CargoResult> { - let packages = options.spec.get_packages(ws)?; - - let mut targets: Vec<_> = packages - .into_iter() - .flat_map(|pkg| { - pkg.manifest() - .targets() - .iter() - .filter(|target| filter_fn(target)) - }) - .map(Target::name) - .collect(); - - targets.sort(); - - Ok(targets) -} - -fn print_available_targets( - filter_fn: fn(&Target) -> bool, - ws: &Workspace<'_>, - options: &CompileOptions, - option_name: &str, - plural_name: &str, -) -> CargoResult<()> { - let targets = get_available_targets(filter_fn, ws, options)?; - - let mut output = String::new(); - writeln!(output, "\"{}\" takes one argument.", option_name)?; - - if targets.is_empty() { - writeln!(output, "No {} available.", plural_name)?; - } else { - writeln!(output, "Available {}:", plural_name)?; - for target in targets { - writeln!(output, " {}", target)?; - } - } - bail!("{}", output) -} - -pub fn print_available_packages(ws: &Workspace<'_>) -> CargoResult<()> { - let packages = ws - .members() - .map(|pkg| pkg.name().as_str()) - .collect::>(); - - let mut output = "\"--package \" requires a SPEC format value, \ - which can be any package ID specifier in the 
dependency graph.\n\ - Run `cargo help pkgid` for more information about SPEC format.\n\n" - .to_string(); - - if packages.is_empty() { - // This would never happen. - // Just in case something regresses we covers it here. - writeln!(output, "No packages available.")?; - } else { - writeln!(output, "Possible packages/workspace members:")?; - for package in packages { - writeln!(output, " {}", package)?; - } - } - bail!("{}", output) -} - -pub fn print_available_examples(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> { - print_available_targets(Target::is_example, ws, options, "--example", "examples") -} - -pub fn print_available_binaries(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> { - print_available_targets(Target::is_bin, ws, options, "--bin", "binaries") -} - -pub fn print_available_benches(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> { - print_available_targets(Target::is_bench, ws, options, "--bench", "benches") -} - -pub fn print_available_tests(ws: &Workspace<'_>, options: &CompileOptions) -> CargoResult<()> { - print_available_targets(Target::is_test, ws, options, "--test", "tests") -} - -/// The path that we pass to rustc is actually fairly important because it will -/// show up in error messages (important for readability), debug information -/// (important for caching), etc. As a result we need to be pretty careful how we -/// actually invoke rustc. -/// -/// In general users don't expect `cargo build` to cause rebuilds if you change -/// directories. That could be if you just change directories in the package or -/// if you literally move the whole package wholesale to a new directory. As a -/// result we mostly don't factor in `cwd` to this calculation. Instead we try to -/// track the workspace as much as possible and we update the current directory -/// of rustc/rustdoc where appropriate. -/// -/// The first returned value here is the argument to pass to rustc, and the -/// second is the cwd that rustc should operate in. -pub fn path_args(ws: &Workspace<'_>, unit: &Unit) -> (PathBuf, PathBuf) { - let ws_root = ws.root(); - let src = match unit.target.src_path() { - TargetSourcePath::Path(path) => path.to_path_buf(), - TargetSourcePath::Metabuild => unit.pkg.manifest().metabuild_path(ws.target_dir()), - }; - assert!(src.is_absolute()); - if unit.pkg.package_id().source_id().is_path() { - if let Ok(path) = src.strip_prefix(ws_root) { - return (path.to_path_buf(), ws_root.to_path_buf()); - } - } - (src, unit.pkg.root().to_path_buf()) -} - -pub fn add_path_args(ws: &Workspace<'_>, unit: &Unit, cmd: &mut ProcessBuilder) { - let (arg, cwd) = path_args(ws, unit); - cmd.arg(arg); - cmd.cwd(cwd); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/version.rs b/collector/compile-benchmarks/cargo-0.60.0/src/cargo/version.rs deleted file mode 100644 index 6dbb9d1c1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/cargo/version.rs +++ /dev/null @@ -1,95 +0,0 @@ -//! Code for representing cargo's release version number. - -use std::fmt; - -/// Information about the git repository where cargo was built from. -pub struct CommitInfo { - pub short_commit_hash: String, - pub commit_hash: String, - pub commit_date: String, -} - -/// Information provided by the outer build system (rustbuild aka bootstrap). -pub struct CfgInfo { - /// Information about the Git repository we may have been built from. - pub commit_info: Option, - /// The release channel we were built for (stable/beta/nightly/dev). 
- pub release_channel: String, -} - -/// Cargo's version. -pub struct VersionInfo { - /// Cargo's version, such as "1.57.0", "1.58.0-beta.1", "1.59.0-nightly", etc. - pub version: String, - /// Information that's only available when we were built with - /// rustbuild, rather than Cargo itself. - pub cfg_info: Option, -} - -impl fmt::Display for VersionInfo { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.version)?; - - if let Some(ref cfg) = self.cfg_info { - if let Some(ref ci) = cfg.commit_info { - write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?; - } - }; - Ok(()) - } -} - -/// Returns information about cargo's version. -pub fn version() -> VersionInfo { - macro_rules! option_env_str { - ($name:expr) => { - option_env!($name).map(|s| s.to_string()) - }; - } - - // This is the version set in rustbuild, which we use to match rustc. - let version = option_env_str!("CFG_RELEASE").unwrap_or_else(|| { - // If cargo is not being built by rustbuild, then we just use the - // version from cargo's own `Cargo.toml`. - // - // There are two versions at play here: - // - version of cargo-the-binary, which you see when you type `cargo --version` - // - version of cargo-the-library, which you download from crates.io for use - // in your packages. - // - // The library is permanently unstable, so it always has a 0 major - // version. However, the CLI now reports a stable 1.x version - // (starting in 1.26) which stays in sync with rustc's version. - // - // Coincidentally, the minor version for cargo-the-library is always - // +1 of rustc's minor version (that is, `rustc 1.11.0` corresponds to - // `cargo `0.12.0`). The versions always get bumped in lockstep, so - // this should continue to hold. - let minor = env!("CARGO_PKG_VERSION_MINOR").parse::().unwrap() - 1; - let patch = env!("CARGO_PKG_VERSION_PATCH").parse::().unwrap(); - format!("1.{}.{}", minor, patch) - }); - - match option_env!("CFG_RELEASE_CHANNEL") { - // We have environment variables set up from configure/make. - Some(_) => { - let commit_info = option_env!("CFG_COMMIT_HASH").map(|s| CommitInfo { - commit_hash: s.to_string(), - short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(), - commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(), - }); - VersionInfo { - version, - cfg_info: Some(CfgInfo { - release_channel: option_env_str!("CFG_RELEASE_CHANNEL").unwrap(), - commit_info, - }), - } - } - // We are being compiled by Cargo itself. - None => VersionInfo { - version, - cfg_info: None, - }, - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/README.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/README.md deleted file mode 100644 index 79181b7f6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/README.md +++ /dev/null @@ -1,71 +0,0 @@ -# Cargo documentation - -This directory contains Cargo's documentation. There are two parts, [The Cargo -Book] which is built with [mdbook] and the man pages, which are built with -[mdman]. - -[The Cargo Book]: https://doc.rust-lang.org/cargo/ -[mdBook]: https://github.com/rust-lang/mdBook -[mdman]: https://github.com/rust-lang/cargo/tree/master/crates/mdman/ - -### Building the book - -Building the book requires [mdBook]. 
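Before moving on through the documentation files: the version fallback in the removed `version.rs` above encodes a small piece of arithmetic worth making explicit. Because cargo-the-library's minor version runs one ahead of the CLI's, this benchmark's crate version 0.60.0 corresponds to the `cargo 1.59.0` binary. A tiny sketch of that derivation:

```rust
// The fallback path above derives the CLI version from cargo-the-library's
// own Cargo.toml version by subtracting one from the minor number.
fn cli_version(lib_minor: u64, lib_patch: u64) -> String {
    format!("1.{}.{}", lib_minor - 1, lib_patch)
}

fn main() {
    // This benchmark's vendored crate: cargo 0.60.0 -> `cargo 1.59.0`.
    assert_eq!(cli_version(60, 0), "1.59.0");
    // The example given in the removed comment: cargo 0.12.0 <-> rustc 1.11.0.
    assert_eq!(cli_version(12, 0), "1.11.0");
}
```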
To get it: - -```console -$ cargo install mdbook -``` - -To build the book: - -```console -$ mdbook build -``` - -`mdbook` provides a variety of different commands and options to help you work -on the book: - -* `mdbook build --open`: Build the book and open it in a web browser. -* `mdbook serve`: Launches a web server on localhost. It also automatically - rebuilds the book whenever any file changes and automatically reloads your - web browser. - -The book contents are driven by the [`SUMMARY.md`](src/SUMMARY.md) file, and -every file must be linked there. - -### Building the man pages - -The man pages use a tool called [mdman] to convert markdown to a man page -format. Check out the documentation at -[`mdman/doc/`](../../crates/mdman/doc/) -for more details. - -The man pages are converted from a templated markdown (located in the -[`src/doc/man/`](man) -directory) to three different formats: - -1. Troff-style man pages, saved in [`src/etc/man/`](../etc/man). -2. Markdown (with some HTML) for the Cargo Book, saved in - [`src/doc/src/commands/`](src/commands). -3. Plain text (needed for embedded man pages on platforms without man such as - Windows), saved in [`src/doc/man/generated_txt/`](man/generated_txt). - -To rebuild the man pages, run the script `build-man.sh` in the `src/doc` directory. - -```console -$ ./build-man.sh -``` - -### SemVer chapter tests - -There is a script to verify that the examples in the SemVer chapter work as -intended. To run the tests, go into the `semver-check` directory and run -`cargo run`. - -## Contributing - -We'd love your help with improving the documentation! Please feel free to -[open issues](https://github.com/rust-lang/cargo/issues) about anything, and -send in PRs for things you'd like to fix or change. If your change is large, -please open an issue first, so we can make sure that it's something we'd -accept before you go through the work of getting a PR together. diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/book.toml b/collector/compile-benchmarks/cargo-0.60.0/src/doc/book.toml deleted file mode 100644 index 350d00dc9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/book.toml +++ /dev/null @@ -1,7 +0,0 @@ -[book] -title = "The Cargo Book" -author = "Alex Crichton, Steve Klabnik and Carol Nichols, with Contributions from the Rust Community" - -[output.html] -git-repository-url = "https://github.com/rust-lang/cargo/tree/master/src/doc/src" -edit-url-template = "https://github.com/rust-lang/cargo/edit/master/src/doc/{path}" diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/build-man.sh b/collector/compile-benchmarks/cargo-0.60.0/src/doc/build-man.sh deleted file mode 100755 index 7b1330b58..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/build-man.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash -# -# This script builds the Cargo man pages. -# -# The source for the man pages are located in src/doc/man/ in markdown format. -# These also are handlebars templates, see crates/mdman/README.md for details. -# -# The generated man pages are placed in the src/etc/man/ directory. The pages -# are also expanded into markdown (after being expanded by handlebars) and -# saved in the src/doc/src/commands/ directory. These are included in the -# Cargo book, which is converted to HTML by mdbook. 
- -set -e - -cd "$(dirname "${BASH_SOURCE[0]}")" - -OPTIONS="--url https://doc.rust-lang.org/cargo/commands/ \ - --man rustc:1=https://doc.rust-lang.org/rustc/index.html \ - --man rustdoc:1=https://doc.rust-lang.org/rustdoc/index.html" - -cargo run --manifest-path=../../crates/mdman/Cargo.toml -- \ - -t md -o src/commands man/cargo*.md \ - $OPTIONS - -cargo run --manifest-path=../../crates/mdman/Cargo.toml -- \ - -t txt -o man/generated_txt man/cargo*.md \ - $OPTIONS - -cargo run --manifest-path=../../crates/mdman/Cargo.toml -- \ - -t man -o ../etc/man man/cargo*.md \ - $OPTIONS diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/README.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/README.md deleted file mode 100644 index 57756211b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# Cargo Contributor Guide - -This is the source of the Cargo Contributor Guide, published at -. It is written in Markdown, using -the [mdbook] tool to convert to HTML. If you are editing these pages, the best -option to view the results is to run `mdbook serve`, which will start a web -server on localhost that you can visit to view the book, and it will -automatically reload each time you edit a page. - -This is published via GitHub Actions to GitHub Pages. - -[mdbook]: https://rust-lang.github.io/mdBook/ diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/book.toml b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/book.toml deleted file mode 100644 index 878d03935..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/book.toml +++ /dev/null @@ -1,6 +0,0 @@ -[book] -title = "Cargo Contributor Guide" -authors = ["Eric Huss"] - -[output.html] -git-repository-url = "https://github.com/rust-lang/cargo/tree/master/src/doc/contrib/src" diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/SUMMARY.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/SUMMARY.md deleted file mode 100644 index 0d63295ff..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/SUMMARY.md +++ /dev/null @@ -1,20 +0,0 @@ -# Summary - -- [Introduction](./index.md) -- [Issue Tracker](./issues.md) -- [Process](./process/index.md) - - [Working on Cargo](./process/working-on-cargo.md) - - [Release process](./process/release.md) - - [Unstable features](./process/unstable.md) -- [Architecture](./architecture/index.md) - - [Codebase Overview](./architecture/codebase.md) - - [SubCommands](./architecture/subcommands.md) - - [Console Output](./architecture/console.md) - - [Packages and Resolution](./architecture/packages.md) - - [Compilation](./architecture/compilation.md) - - [Files](./architecture/files.md) -- [Tests](./tests/index.md) - - [Running Tests](./tests/running.md) - - [Writing Tests](./tests/writing.md) - - [Benchmarking and Profiling](./tests/profiling.md) -- [Design Principles](./design.md) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/codebase.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/codebase.md deleted file mode 100644 index 12c0c1710..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/codebase.md +++ /dev/null @@ -1,104 +0,0 @@ -# Codebase Overview - -This is a very high-level overview of the Cargo codebase. 
- -* [`src/bin/cargo`](https://github.com/rust-lang/cargo/tree/master/src/bin/cargo) - โ€” Cargo is split in a library and a binary. This is the binary side that - handles argument parsing, and then calls into the library to perform the - appropriate subcommand. Each Cargo subcommand is a separate module here. See - [SubCommands](subcommands.md). - -* [`src/cargo/ops`](https://github.com/rust-lang/cargo/tree/master/src/cargo/ops) - โ€” Every major operation is implemented here. This is where the binary CLI - usually calls into to perform the appropriate action. - - * [`src/cargo/ops/cargo_compile.rs`](https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/cargo_compile.rs) - โ€” This is the entry point for all the compilation commands. This is a - good place to start if you want to follow how compilation starts and - flows to completion. - -* [`src/cargo/core/resolver`](https://github.com/rust-lang/cargo/tree/master/src/cargo/core/resolver) - โ€” This is the dependency and feature resolvers. - -* [`src/cargo/core/compiler`](https://github.com/rust-lang/cargo/tree/master/src/cargo/core/compiler) - โ€” This is the code responsible for running `rustc` and `rustdoc`. - - * [`src/cargo/core/compiler/build_context/mod.rs`](https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/build_context/mod.rs) - โ€” The `BuildContext` is the result of the "front end" of the build - process. This contains the graph of work to perform and any settings - necessary for `rustc`. After this is built, the next stage of building - is handled in `Context`. - - * [`src/cargo/core/compiler/context`](https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/context/mod.rs) - โ€” The `Context` is the mutable state used during the build process. This - is the core of the build process, and everything is coordinated through - this. - - * [`src/cargo/core/compiler/fingerprint.rs`](https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/fingerprint.rs) - โ€” The `fingerprint` module contains all the code that handles detecting - if a crate needs to be recompiled. - -* [`src/cargo/core/source`](https://github.com/rust-lang/cargo/tree/master/src/cargo/core/source) - โ€” The `Source` trait is an abstraction over different sources of packages. - Sources are uniquely identified by a `SourceId`. Sources are implemented in - the - [`src/cargo/sources`](https://github.com/rust-lang/cargo/tree/master/src/cargo/sources) - directory. - -* [`src/cargo/util`](https://github.com/rust-lang/cargo/tree/master/src/cargo/util) - โ€” This directory contains generally-useful utility modules. - -* [`src/cargo/util/config`](https://github.com/rust-lang/cargo/tree/master/src/cargo/util/config) - โ€” This directory contains the config parser. It makes heavy use of - [serde](https://serde.rs/) to merge and translate config values. The - `Config` is usually accessed from the - [`Workspace`](https://github.com/rust-lang/cargo/blob/master/src/cargo/core/workspace.rs), - though references to it are scattered around for more convenient access. - -* [`src/cargo/util/toml`](https://github.com/rust-lang/cargo/tree/master/src/cargo/util/toml) - โ€” This directory contains the code for parsing `Cargo.toml` files. - - * [`src/cargo/ops/lockfile.rs`](https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/lockfile.rs) - โ€” This is where `Cargo.lock` files are loaded and saved. 
- -* [`src/doc`](https://github.com/rust-lang/cargo/tree/master/src/doc) - โ€” This directory contains Cargo's documentation and man pages. - -* [`src/etc`](https://github.com/rust-lang/cargo/tree/master/src/etc) - โ€” These are files that get distributed in the `etc` directory in the Rust release. - The man pages are auto-generated by a script in the `src/doc` directory. - -* [`crates`](https://github.com/rust-lang/cargo/tree/master/crates) - โ€” A collection of independent crates used by Cargo. - -## Extra crates - -Some functionality is split off into separate crates, usually in the -[`crates`](https://github.com/rust-lang/cargo/tree/master/crates) directory. - -* [`cargo-platform`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-platform) - โ€” This library handles parsing `cfg` expressions. -* [`cargo-test-macro`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-test-macro) - โ€” This is a proc-macro used by the test suite to define tests. More - information can be found at [`cargo_test` - attribute](../tests/writing.md#cargo_test-attribute). -* [`cargo-test-support`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-test-support) - โ€” This contains a variety of code to support [writing - tests](../tests/writing.md). -* [`cargo-util`](https://github.com/rust-lang/cargo/tree/master/crates/cargo-util) - โ€” This contains general utility code that is shared between cargo and the - testsuite. -* [`crates-io`](https://github.com/rust-lang/cargo/tree/master/crates/crates-io) - โ€” This contains code for accessing the crates.io API. -* [`credential`](https://github.com/rust-lang/cargo/tree/master/crates/credential) - โ€” This subdirectory contains several packages for implementing the - experimental - [credential-process](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#credential-process) - feature. -* [`mdman`](https://github.com/rust-lang/cargo/tree/master/crates/mdman) โ€” - This is a utility for generating cargo's man pages. See [Building the man - pages](https://github.com/rust-lang/cargo/tree/master/src/doc#building-the-man-pages) - for more information. -* [`resolver-tests`](https://github.com/rust-lang/cargo/tree/master/crates/resolver-tests) - โ€” This is a dedicated package that defines tests for the [dependency - resolver](../architecture/packages.md#resolver). diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/compilation.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/compilation.md deleted file mode 100644 index c88d6567a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/compilation.md +++ /dev/null @@ -1,39 +0,0 @@ -# Compilation - -The [`Unit`] is the primary data structure representing a single execution of -the compiler. It (mostly) contains all the information needed to determine -which flags to pass to the compiler. - -The entry to the compilation process is located in the [`cargo_compile`] -module. The compilation can be conceptually broken into these steps: - -1. Perform dependency resolution (see [the resolution chapter]). -2. Generate the root `Unit`s, the things the user requested to compile on the - command-line. This is done in [`generate_targets`]. -3. Starting from the root `Unit`s, generate the [`UnitGraph`] by walking the - dependency graph from the resolver. The `UnitGraph` contains all of the - `Unit` structs, and information about the dependency relationships between - units. 
This is done in the [`unit_dependencies`] module. -4. Construct the [`BuildContext`] with all of the information collected so - far. This is the end of the "front end" of compilation. -5. Create a [`Context`], a large, mutable data structure that coordinates the - compilation process. -6. The [`Context`] will create a [`JobQueue`], a data structure that tracks - which units need to be built. -7. [`drain_the_queue`] does the compilation process. This is the only point in - Cargo that currently uses threads. -8. The result of the compilation is stored in the [`Compilation`] struct. This - can be used for various things, such as running tests after the compilation - has finished. - -[`cargo_compile`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/cargo_compile.rs -[`generate_targets`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/ops/cargo_compile.rs#L725-L739 -[`UnitGraph`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/unit_graph.rs -[the resolution chapter]: packages.md -[`Unit`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/unit.rs -[`unit_dependencies`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/unit_dependencies.rs -[`BuildContext`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/build_context/mod.rs -[`Context`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/context/mod.rs -[`JobQueue`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/job_queue.rs -[`drain_the_queue`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/compiler/job_queue.rs#L623-L634 -[`Compilation`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/compilation.rs diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/console.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/console.md deleted file mode 100644 index 2c5412b8c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/console.md +++ /dev/null @@ -1,82 +0,0 @@ -# Console Output - -All of Cargo's output should go through the [`Shell`] struct. You can normally -obtain the `Shell` instance from the [`Config`] struct. Do **not** use the std -`println!` macros. - -Most of Cargo's output goes to stderr. When running in JSON mode, the output -goes to stdout. - -It is important to properly handle errors when writing to the console. -Informational commands, like `cargo list`, should ignore any errors writing -the output. There are some [`drop_print`] macros that are intended to make -this easier. - -Messages written during compilation should handle errors, and abort the build -if they are unable to be displayed. This is generally automatically handled in -the [`JobQueue`] as it processes each message. - -[`Shell`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/shell.rs -[`Config`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/config/mod.rs -[`drop_print`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/util/config/mod.rs#L1820-L1848 -[`JobQueue`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/job_queue.rs - -## Errors - -Cargo uses [`anyhow`] for managing errors. 
This makes it convenient to "chain" -errors together, so that Cargo can report how an error originated, and what it -was trying to do at the time. - -Error helpers are implemented in the [`errors`] module. Use the -`InternalError` error type for errors that are not expected to happen. This -will print a message to the user to file a bug report. - -The binary side of Cargo uses the `CliError` struct to wrap the process exit -code. Usually Cargo exits with 101 for an error, but some commands like `cargo -test` will exit with different codes. - -[`errors`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/errors.rs - -## Style - -Some guidelines for Cargo's output: - -* Keep the normal output brief. Cargo is already fairly noisy, so try to keep - the output as brief and clean as possible. -* Good error messages are very important! Try to keep them brief and to the - point, but good enough that a beginner can understand what is wrong and can - figure out how to fix. It is a difficult balance to hit! Err on the side of - providing extra information. -* When using any low-level routines, such as `std::fs`, *always* add error - context about what it is doing. For example, reading from a file should - include context about which file is being read if there is an error. -* Cargo's error style is usually a phrase, starting with a lowercase letter. - If there is a longer error message that needs multiple sentences, go ahead - and use multiple sentences. This should probably be improved sometime in the - future to be more structured. - -## Debug logging - -Cargo uses the [`env_logger`] crate to display debug log messages. The -`CARGO_LOG` environment variable can be set to enable debug logging, with a -value such as `trace`, `debug`, or `warn`. It also supports filtering for -specific modules. Feel free to use the standard [`log`] macros to help with -diagnosing problems. - -```sh -# Outputs all logs with levels debug and higher -CARGO_LOG=debug cargo generate-lockfile - -# Don't forget that you can filter by module as well -CARGO_LOG=cargo::core::resolver=trace cargo generate-lockfile - -# This will print lots of info about the download process. `trace` prints even more. -CARGO_HTTP_DEBUG=true CARGO_LOG=cargo::ops::registry=debug cargo fetch - -# This is an important command for diagnosing fingerprint issues. -CARGO_LOG=cargo::core::compiler::fingerprint=trace cargo build -``` - -[`env_logger`]: https://docs.rs/env_logger -[`log`]: https://docs.rs/log -[`anyhow`]: https://docs.rs/anyhow diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/files.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/files.md deleted file mode 100644 index 2e6e02b07..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/files.md +++ /dev/null @@ -1,67 +0,0 @@ -# Files - -This chapter gives some pointers on where to start looking at Cargo's on-disk -data file structures. - -* [`Layout`] is the abstraction for the `target` directory. It handles locking - the target directory, and providing paths to the parts inside. There is a - separate `Layout` for each "target". -* [`Resolve`] contains the contents of the `Cargo.lock` file. See the [`encode`] - module for the different `Cargo.lock` formats. -* [`TomlManifest`] contains the contents of the `Cargo.toml` file. It is translated - to a [`Manifest`] object for some simplification, and the `Manifest` is stored - in a [`Package`]. 
-* The [`fingerprint`] module deals with the fingerprint information stored in - `target/debug/.fingerprint`. This tracks whether or not a crate needs to be - rebuilt. -* `cargo install` tracks its installed files with some metadata in - `$CARGO_HOME`. The metadata is managed in the - [`common_for_install_and_uninstall`] module. -* Git sources are cached in `$CARGO_HOME/git`. The code for this cache is in - the [`git`] source module. -* Registries are cached in `$CARGO_HOME/registry`. There are three parts, the - index, the compressed `.crate` files, and the extracted sources of those - crate files. - * Management of the registry cache can be found in the [`registry`] source - module. Note that this includes an on-disk cache as an optimization for - accessing the git repository. - * Saving of `.crate` files is handled by the [`RemoteRegistry`]. - * Extraction of `.crate` files is handled by the [`RegistrySource`]. - * There is a lock for the package cache. Code must be careful, because - this lock must be obtained manually. See - [`Config::acquire_package_cache_lock`]. - -[`Layout`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/layout.rs -[`Resolve`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/resolver/resolve.rs -[`encode`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/resolver/encode.rs -[`TomlManifest`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/toml/mod.rs -[`Manifest`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/manifest.rs -[`Package`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/package.rs -[`common_for_install_and_uninstall`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/common_for_install_and_uninstall.rs -[`git`]: https://github.com/rust-lang/cargo/tree/master/src/cargo/sources/git -[`registry`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/sources/registry/mod.rs -[`RemoteRegistry`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/sources/registry/remote.rs -[`RegistrySource`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/sources/registry/mod.rs -[`Config::acquire_package_cache_lock`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/util/config/mod.rs#L1261-L1266 - -## Filesystems - -Cargo tends to get run on a very wide array of file systems. Different file -systems can have a wide range of capabilities, and Cargo should strive to do -its best to handle them. Some examples of issues to deal with: - -* Not all file systems support locking. Cargo tries to detect if locking is - supported, and if not, will ignore lock errors. This isn't ideal, but it is - difficult to deal with. -* The [`fs::canonicalize`] function doesn't work on all file systems - (particularly some Windows file systems). If that function is used, there - should be a fallback if it fails. This function will also return `\\?\` - style paths on Windows, which can have some issues (such as some tools not - supporting them, or having issues with relative paths). -* Timestamps can be unreliable. The [`fingerprint`] module has a deeper - discussion of this. One example is that Docker cache layers will erase the - fractional part of the time stamp. -* Symlinks are not always supported, particularly on Windows. 
- -[`fingerprint`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/fingerprint.rs -[`fs::canonicalize`]: https://doc.rust-lang.org/std/fs/fn.canonicalize.html diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/index.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/index.md deleted file mode 100644 index fded5fc4b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/index.md +++ /dev/null @@ -1,8 +0,0 @@ -# Architecture Overview - -This chapter gives a very high-level overview of Cargo's architecture. This is -intended to give you links into the code which is hopefully commented with -more in-depth information. - -If you feel something is missing that would help you, feel free to ask on -Zulip. diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/packages.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/packages.md deleted file mode 100644 index dc7deefbb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/packages.md +++ /dev/null @@ -1,92 +0,0 @@ -# Packages and Resolution - -## Workspaces - -The [`Workspace`] object is usually created very early by calling the -[`workspace`][ws-method] helper method. This discovers the root of the -workspace, and loads all the workspace members as a [`Package`] object. Each -package corresponds to a single `Cargo.toml` (which is deserialized into a -[`Manifest`]), and may define several [`Target`]s, such as the library, -binaries, integration test or examples. Targets are crates (each target -defines a crate root, like `src/lib.rs` or `examples/foo.rs`) and are what is -actually compiled by `rustc`. - -## Packages and Sources - -There are several data structures that are important to understand how -packages are found and loaded: - -* [`Package`] โ€” A package, which is a `Cargo.toml` manifest and its associated - source files. - * [`PackageId`] โ€” A unique identifier for a package. -* [`Source`] โ€” An abstraction for something that can fetch packages (a remote - registry, a git repo, the local filesystem, etc.). Check out the [source - implementations] for all the details about registries, indexes, git - dependencies, etc. - * [`SourceId`] โ€” A unique identifier for a source. -* [`SourceMap`] โ€” Map of all available sources. -* [`PackageRegistry`] โ€” This is the main interface for how the dependency - resolver finds packages. It contains the `SourceMap`, and handles things - like the `[patch]` table. The `Registry` trait provides a generic interface - to the `PackageRegistry`, but this is only used for providing an alternate - implementation of the `PackageRegistry` for testing. The dependency resolver - sends a query to the `PackageRegistry` to "get me all packages that match - this dependency declaration". -* [`Summary`] โ€” A summary is a subset of a [`Manifest`], and is essentially - the information that can be found in a registry index. Queries against the - `PackageRegistry` yields a `Summary`. The resolver uses the summary - information to build the dependency graph. -* [`PackageSet`] โ€” Contains all of the `Package` objects. This works with the - [`Downloads`] struct to coordinate downloading packages. It has a reference - to the `SourceMap` to get the `Source` objects which tell the `Downloads` - struct which URLs to fetch. - -All of these come together in the [`ops::resolve`] module. 
This module -contains the primary functions for performing resolution (described below). It -also handles downloading of packages. It is essentially where all of the data -structures above come together. - -## Resolver - -[`Resolve`] is the representation of a directed graph of package dependencies, -which uses [`PackageId`]s for nodes. This is the data structure that is saved -to the `Cargo.lock` file. If there is no lock file, Cargo constructs a resolve -by finding a graph of packages which matches declared dependency specification -according to SemVer. - -[`ops::resolve`] is the front-end for creating a `Resolve`. It handles loading -the `Cargo.lock` file, checking if it needs updating, etc. - -Resolution is currently performed twice. It is performed once with all -features enabled. This is the resolve that gets saved to `Cargo.lock`. It then -runs again with only the specific features the user selected on the -command-line. Ideally this second run will get removed in the future when -transitioning to the new feature resolver. - -### Feature resolver - -A new feature-specific resolver was added in 2020 which adds more -sophisticated feature resolution. It is located in the [`resolver::features`] -module. The original dependency resolver still performs feature unification, -as it can help reduce the dependencies it has to consider during resolution -(rather than assuming every optional dependency of every package is enabled). -Checking if a feature is enabled must go through the new feature resolver. - - -[`Workspace`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/workspace.rs -[ws-method]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/util/command_prelude.rs#L298-L318 -[`Package`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/package.rs -[`Target`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/manifest.rs#L181-L206 -[`Manifest`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/manifest.rs#L27-L51 -[`Source`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/source/mod.rs -[`SourceId`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/source/source_id.rs -[`SourceMap`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/source/mod.rs#L245-L249 -[`PackageRegistry`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/registry.rs#L36-L81 -[`ops::resolve`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/resolve.rs -[`resolver::features`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/resolver/features.rs#L259 -[source implementations]: https://github.com/rust-lang/cargo/tree/master/src/cargo/sources -[`PackageId`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/package_id.rs -[`Summary`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/summary.rs -[`PackageSet`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/package.rs#L283-L296 -[`Downloads`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/src/cargo/core/package.rs#L298-L352 -[`Resolve`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/resolver/resolve.rs diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/subcommands.md 
b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/subcommands.md deleted file mode 100644 index bdb586c24..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/architecture/subcommands.md +++ /dev/null @@ -1,25 +0,0 @@ -# SubCommands - -Cargo is a single binary composed of a set of [`clap`] subcommands. All -subcommands live in [`src/bin/cargo/commands`] directory. -[`src/bin/cargo/main.rs`] is the entry point. - -Each subcommand, such as [`src/bin/cargo/commands/build.rs`], usually performs -the following: - -1. Parse the CLI flags. See the [`command_prelude`] module for some helpers to make this easier. -2. Load the config files. -3. Discover and load the workspace. -4. Calls the actual implementation of the subcommand which resides in [`src/cargo/ops`]. - -If the subcommand is not found in the built-in list, then Cargo will -automatically search for a subcommand named `cargo-{NAME}` in the users `PATH` -to execute the subcommand. - - -[`clap`]: https://clap.rs/ -[`src/bin/cargo/commands/build.rs`]: https://github.com/rust-lang/cargo/tree/master/src/bin/cargo/commands/build.rs -[`src/cargo/ops`]: https://github.com/rust-lang/cargo/tree/master/src/cargo/ops -[`src/bin/cargo/commands`]: https://github.com/rust-lang/cargo/tree/master/src/bin/cargo/commands -[`src/bin/cargo/main.rs`]: https://github.com/rust-lang/cargo/blob/master/src/bin/cargo/main.rs -[`command_prelude`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/util/command_prelude.rs diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/design.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/design.md deleted file mode 100644 index d51d3eb20..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/design.md +++ /dev/null @@ -1,101 +0,0 @@ -# Design Principles - -The purpose of Cargo is to formalize a canonical Rust workflow, by automating -the standard tasks associated with distributing software. Cargo simplifies -structuring a new project, adding dependencies, writing and running unit -tests, and more. - -Cargo is not intended to be a general-purpose build tool. Ideally, it should -be easy to integrate it within another build tool, though admittedly that is -not as seamless as desired. - -## Stability and compatibility - -### Backwards compatibility - -Cargo strives to remain backwards compatible with projects created in previous -versions. The CLI interface also strives to remain backwards compatible, such -that the commands and options behave the same. That being said, changes in -behavior, and even outright breakage are sometimes done in limited situations. -The following outlines some situations where backwards-incompatible changes are -made: - -* Anything that addresses a security concern. -* Dropping support for older platforms and tooling. Cargo follows the Rust - [tiered platform support]. -* Changes to resolve possibly unsafe or unreliable behavior. - -None of these changes should be taken lightly, and should be avoided if -possible, or possibly with some transition period to alert the user of the -potential change. - -Behavior is sometimes changed in ways that have a high confidence that it -won't break existing workflows. Almost every change carries this risk, so it -is often a judgment call balancing the benefit of the change with the -perceived possibility of its negative consequences. 
- -At times, some changes fall in the gray area, where the current behavior is -undocumented, or not working as intended. These are more difficult judgment -calls. The general preference is to balance towards avoiding breaking existing -workflows. - -Support for older registry APIs and index formats may be dropped, if there is -high confidence that there aren't any active registries that may be affected. -This has never (to my knowledge) happened so far, and is unlikely to happen in -the future, but remains a possibility. - -In all of the above, a transition period may be employed if a change is known -to cause breakage. A warning can be issued to alert the user that something -will change, and provide them with an alternative to resolve the issue -(preferably in a way that is compatible across versions if possible). - -Cargo is only expected to work with the version of the related Rust tools -(`rustc`, `rustdoc`, etc.) that it is released with. As a matter of choice, -the latest nightly works with the most recent stable release, but that is -mostly to accommodate development of Cargo itself, and should not be expected -by users. - -### Forwards compatibility - -Additionally, Cargo strives a limited degree of *forwards compatibility*. -Changes should not egregiously prevent older versions from working. This is -mostly relevant for persistent data, such as on-disk files and the registry -interface and index. It also applies to a lesser degree to the registry API. - -Changes to `Cargo.lock` require a transition time, where the new format is not -automatically written when the lock file is updated. The transition time -should not be less than 6 months, though preferably longer. New projects may -use the new format in a shorter time frame. - -Changes to `Cargo.toml` can be made in any release. This is because the user -must manually modify the file, and opt-in to any new changes. Additionally, -Cargo will usually only issue a warning about new fields it doesn't -understand, but otherwise continue to function. - -Changes to cache files (such as artifacts in the `target` directory, or cached -data in Cargo's home directory) should not *prevent* older versions from -running, but they may cause older versions to recreate the cache, which may -result in a performance impact. - -Changes to the registry index should not prevent older versions from working. -Generally, older versions ignore new fields, so the format should be easily -extensible. Changes to the format or interpretation of existing fields should -be done very carefully to avoid preventing older versions of Cargo from -working. In some cases, this may mean that older versions of Cargo will not be -able to *select* a newly published crate, but it shouldn't prevent them from -working at all. This level of compatibility may not last forever, but the -exact time frame for such a change has not yet been decided. - -The registry API may be changed in such a way to prevent older versions of -Cargo from working. Generally, compatibility should be retained for as long as -possible, but the exact length of time is not specified. - -## Simplicity and layers - -Standard workflows should be easy and consistent. Each knob that is added has -a high cost, regardless if it is intended for a small audience. Layering and -defaults can help avoid the surface area that the user needs to be concerned -with. Try to avoid small functionalities that may have complex interactions -with one another. 
- -[tiered platform support]: https://doc.rust-lang.org/nightly/rustc/platform-support.html diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/index.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/index.md deleted file mode 100644 index 5ab169ea7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/index.md +++ /dev/null @@ -1,29 +0,0 @@ -# Introduction - -Thank you for your interest in contributing to [Cargo]! This guide provides an -overview of how to contribute to Cargo, how to dive into the code, and how the -testing infrastructure works. - -There are many ways to contribute, such as [helping other users], [filing -issues], [improving the documentation], [fixing bugs], and working on [small] -and [large features]. - -If you have a general question about Cargo or its internals, feel free to ask -on [Zulip]. - -This guide assumes you have some familiarity with Rust, and how to use Cargo, -[rustup], and general development tools like [git]. - -Please also read the [Rust Code of Conduct]. - -[Cargo]: https://doc.rust-lang.org/cargo/ -[Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo -[Rust Code of Conduct]: https://www.rust-lang.org/policies/code-of-conduct -[helping other users]: https://users.rust-lang.org/ -[filing issues]: issues.md -[rustup]: https://rust-lang.github.io/rustup/ -[git]: https://git-scm.com/ -[improving the documentation]: https://github.com/rust-lang/cargo/tree/master/src/doc -[fixing bugs]: process/index.md#working-on-small-bugs -[small]: process/index.md#working-on-small-features -[large features]: process/index.md#working-on-large-features diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/issues.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/issues.md deleted file mode 100644 index 30e2d8316..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/issues.md +++ /dev/null @@ -1,109 +0,0 @@ -# Issue Tracker - -Cargo's issue tracker is located at -. This is the primary spot where -we track bugs and small feature requests. See [Process] for more about our -process for proposing changes. - -## Filing issues - -We can't fix what we don't know about, so please report problems liberally. -This includes problems with understanding the documentation, unhelpful error -messages, and unexpected behavior. - -**If you think that you have identified an issue with Cargo that might -compromise its users' security, please do not open a public issue on GitHub. -Instead, we ask you to refer to Rust's [security policy].** - -Opening an issue is as easy as following [this link][new-issues]. There are -several templates for different issue kinds, but if none of them fit your -issue, don't hesitate to modify one of the templates, or click the [Open a -blank issue] link. - -The Rust tools are spread across multiple repositories in the Rust -organization. It may not always be clear where to file an issue. No worries! -If you file in the wrong tracker, someone will either transfer it to the -correct one or ask you to move it. Some other repositories that may be -relevant are: - -* [`rust-lang/rust`] โ€” Home for the [`rustc`] compiler and [`rustdoc`]. -* [`rust-lang/rustup`] โ€” Home for the [`rustup`] toolchain installer. -* [`rust-lang/rustfmt`] โ€” Home for the `rustfmt` tool, which also includes `cargo fmt`. -* [`rust-lang/rust-clippy`] โ€” Home for the `clippy` tool, which also includes `cargo clippy`. 
-* [`rust-lang/crates.io`] โ€” Home for the [crates.io] website. - -Issues with [`cargo fix`] can be tricky to know where they should be filed, -since the fixes are driven by `rustc`, processed by [`rustfix`], and the -front-interface is implemented in Cargo. Feel free to file in the Cargo issue -tracker, and it will get moved to one of the other issue trackers if -necessary. - -[Process]: process/index.md -[security policy]: https://www.rust-lang.org/security.html -[new-issues]: https://github.com/rust-lang/cargo/issues/new/choose -[Open a blank issue]: https://github.com/rust-lang/cargo/issues/new -[`rust-lang/rust`]: https://github.com/rust-lang/rust -[`rust-lang/rustup`]: https://github.com/rust-lang/rustup -[`rust-lang/rustfmt`]: https://github.com/rust-lang/rustfmt -[`rust-lang/rust-clippy`]: https://github.com/rust-lang/rust-clippy -[`rustc`]: https://doc.rust-lang.org/rustc/ -[`rustdoc`]: https://doc.rust-lang.org/rustdoc/ -[`rustup`]: https://rust-lang.github.io/rustup/ -[`rust-lang/crates.io`]: https://github.com/rust-lang/crates.io -[crates.io]: https://crates.io/ -[`rustfix`]: https://github.com/rust-lang/rustfix/ -[`cargo fix`]: https://doc.rust-lang.org/cargo/commands/cargo-fix.html - -## Issue labels - -[Issue labels] are very helpful to identify the types of issues and which -category they are related to. The Cargo team typically manages assigning -labels. The labels use a naming convention with short prefixes and colors to -indicate the kind of label: - -* Yellow, **A**-prefixed labels state which **area** of the project an issue - relates to. - -* Light purple, **C**-prefixed labels represent the **category** of an issue. - In particular, **[C-feature-request]** marks *proposals* for new features. If - an issue is **C-feature-request**, but is not **[Feature accepted]** or - **[I-nominated]**, then it was not thoroughly discussed, and might need some - additional design or perhaps should be implemented as an external subcommand - first. Ping @rust-lang/cargo if you want to send a PR for such issue. - -* Dark purple, **Command**-prefixed labels mean the issue has to do with a - specific cargo command. - -* Green, **E**-prefixed labels indicate the level of **experience** or - **effort** necessary to fix the issue. **[E-mentor]** issues also - have some instructions on how to get started. Generally, all of the - **E**-prefixed labels are issues that are ready for someone to contribute - to! - -* Red, **I**-prefixed labels indicate the **importance** of the issue. The - **[I-nominated]** label indicates that an issue has been nominated for - prioritizing at the next triage meeting. - -* Purple gray, **O**-prefixed labels are the **operating system** or platform - that this issue is specific to. - -* Orange, **P**-prefixed labels indicate a bug's **priority**. - -* **S**-prefixed labels are "status" labels, typically used for PRs, but can - also indicate an issue is **[S-blocked]**. - -* The light orange **[relnotes]** label marks issues that should be highlighted - in the [Rust release notes] of the next release. - -* Dark blue, **Z**-prefixed labels are for unstable, [nightly features]. 
- -[Issue labels]: https://github.com/rust-lang/cargo/labels -[E-easy]: https://github.com/rust-lang/cargo/labels/E-easy -[E-mentor]: https://github.com/rust-lang/cargo/labels/E-mentor -[I-nominated]: https://github.com/rust-lang/cargo/labels/I-nominated -[C-feature-request]: https://github.com/rust-lang/cargo/labels/C-feature-request -[Feature accepted]: https://github.com/rust-lang/cargo/labels/Feature%20accepted -[S-blocked]: https://github.com/rust-lang/cargo/labels/S-blocked -[Rust release notes]: https://github.com/rust-lang/rust/blob/master/RELEASES.md -[nightly features]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html -[relnotes]: https://github.com/rust-lang/cargo/issues?q=label%3Arelnotes diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/index.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/index.md deleted file mode 100644 index bb40bf872..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/index.md +++ /dev/null @@ -1,124 +0,0 @@ -# Process - -This chapter gives an overview of how Cargo comes together, and how you can be -a part of that process. - -See the [Working on Cargo] chapter for an overview of the contribution -process. - -[Working on Cargo]: working-on-cargo.md - -## Cargo team - -Cargo is managed by a [team] of volunteers. The Cargo Team reviews all -changes, and sets the direction for the project. - -The team meets on a weekly basis on a video chat. If you are interested in -participating, feel free to contact us on [Zulip]. - -## Roadmap - -The Cargo team typically establishes a roadmap each year that sets which areas -they will be focusing on. This is usually posted on the Inside Rust Blog (such -as [the 2020 roadmap]). - -The [Roadmap Project Board] is used for tracking major initiatives. This gives -an overview of the things the team is interested in and thinking about. - -The [RFC Project Board] is used for tracking [RFCs]. - -[the 2020 roadmap]: https://blog.rust-lang.org/inside-rust/2020/01/10/cargo-in-2020.html -[Roadmap Project Board]: https://github.com/rust-lang/cargo/projects/1 -[RFC Project Board]: https://github.com/rust-lang/cargo/projects/2 -[RFCs]: https://github.com/rust-lang/rfcs/ - -## Working on small bugs - -Issues labeled with the [E-help-wanted], [E-easy], or [E-mentor] [labels] are -typically issues that the Cargo team wants to see addressed, and are -relatively easy to get started with. If you are interested in one of those, -and it has not already been assigned to someone, leave a comment. See [Issue -assignment](#issue-assignment) below for assigning yourself. - -If there is a specific issue that you are interested in, but it doesn't have -one of the `E-` labels, leave a comment on the issue. If a Cargo team member -has the time to help out, they will respond to help with the next steps. - -[E-help-wanted]: https://github.com/rust-lang/cargo/labels/E-help-wanted -[E-easy]: https://github.com/rust-lang/cargo/labels/E-easy -[E-mentor]: https://github.com/rust-lang/cargo/labels/E-mentor -[labels]: ../issues.md#issue-labels - -## Working on large bugs - -Some issues may be difficult to fix. They may require significant code -changes, or major design decisions. The [E-medium] and [E-hard] [labels] can -be used to tag such issues. These will typically involve some discussion with -the Cargo team on how to tackle it. 
- -[E-medium]: https://github.com/rust-lang/cargo/labels/E-medium -[E-hard]: https://github.com/rust-lang/cargo/labels/E-hard - -## Working on small features - -Small feature requests are typically managed on the [issue -tracker][issue-feature-request]. Features that the Cargo team have approved -will have the [Feature accepted] label or the [E-mentor] label. If there is a -feature request that you are interested in, feel free to leave a comment -expressing your interest. If a Cargo team member has the time to help out, -they will respond to help with the next steps. Keep in mind that the Cargo -team has limited time, and may not be able to help with every feature request. -Most of them require some design work, which can be difficult. Check out the -[design principles chapter] for some guidance. - -## Working on large features - -Cargo follows the Rust model of evolution. Major features usually go through -an [RFC process]. Therefore, before opening a feature request issue create a -Pre-RFC thread on the [internals][irlo] forum to get preliminary feedback. -Implementing a feature as a [custom subcommand][subcommands] is encouraged as -it helps demonstrate the demand for the functionality and is a great way to -deliver a working solution faster as it can iterate outside of Cargo's release -cadence. - -See the [unstable chapter] for how new major features are typically -implemented. - -[unstable chapter]: unstable.md - -## Bots and infrastructure - -The Cargo project uses several bots: - -* [GitHub Actions] are used to automatically run all tests for each PR. -* [rust-highfive] automatically assigns reviewers for PRs. -* [bors] is used to merge PRs. See [The merging process]. -* [triagebot] is used for assigning issues to non-members, see [Issue - assignment](#issue-assignment). -* [rfcbot] is used for making asynchronous decisions by team members. - -[rust-highfive]: https://github.com/rust-highfive -[bors]: https://buildbot2.rust-lang.org/homu/ -[The merging process]: working-on-cargo.md#the-merging-process -[GitHub Actions]: https://github.com/features/actions -[triagebot]: https://github.com/rust-lang/triagebot/wiki -[rfcbot]: https://github.com/rust-lang/rfcbot-rs - -## Issue assignment - -Normally, if you plan to work on an issue that has been marked with one of the -`E-` tags or [Feature accepted], it is sufficient just to leave a comment that -you are working on it. We also have a bot that allows you to formally "claim" -an issue by entering the text `@rustbot claim` in a comment. See the -[Assignment] docs on how this works. 
- - -[Assignment]: https://github.com/rust-lang/triagebot/wiki/Assignment -[team]: https://www.rust-lang.org/governance/teams/dev-tools#cargo -[Zulip]: https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo -[issue-feature-request]: https://github.com/rust-lang/cargo/labels/C-feature-request -[Feature accepted]: https://github.com/rust-lang/cargo/labels/Feature%20accepted -[design principles chapter]: ../design.md -[RFC process]: https://github.com/rust-lang/rfcs/ -[irlo]: https://internals.rust-lang.org/ -[subcommands]: https://doc.rust-lang.org/cargo/reference/external-tools.html#custom-subcommands diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/release.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/release.md deleted file mode 100644 index e6fbcaf8f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/release.md +++ /dev/null @@ -1,109 +0,0 @@ -# Release process - -Cargo is released with `rustc` using a ["train model"][choochoo]. After a -change lands in Cargo's master branch, it will be synced with the -[rust-lang/rust] repository by a Cargo team member, which happens about once a -week. If there are complications, it can take longer. After it is synced and -merged, the changes will appear in the next nightly release, which is usually -published around 00:30 UTC. - -After changes are in the nightly release, they will make their way to the -stable release anywhere from 6 to 12 weeks later, depending on when during the -cycle it landed. - -The current release schedule is posted on the [Rust Forge]. See the [release -process] for more details on how Rust's releases are created. Rust releases -are managed by the [Release team]. - -[Rust Forge]: https://forge.rust-lang.org/ - -## Build process - -The build process for Cargo is handled as part of building Rust. Every PR on -the [rust-lang/rust] repository creates a full collection of release artifacts -for every platform. The code for this is in the [`dist` bootstrap module]. -Every night at 00:00 UTC, the artifacts from the most recently merged PR are -promoted to the nightly release channel. A similar process happens for beta -and stable releases. - -[`dist` bootstrap module]: https://github.com/rust-lang/rust/blob/master/src/bootstrap/dist.rs - -## Version updates - -Shortly after each major release, a Cargo team member will post a PR to update -Cargo's version in `Cargo.toml`. Cargo's library is permanently unstable, so -its version number starts with a `0`. The minor version is always 1 greater -than the Rust release it is a part of, so cargo 0.49.0 is part of the 1.48 -Rust release. The [CHANGELOG] is also usually updated at this time. - -Also, any version-specific checks that are no longer needed can be removed. -For example, some tests are disabled on stable if they require some nightly -behavior. Once that behavior is available on the new stable release, the -checks are no longer necessary. (I usually search for the word "nightly" in -the testsuite directory, and read the comments to see if any of those nightly -checks can be removed.) - -Sometimes Cargo will have a runtime check to probe `rustc` if it supports a -specific feature. This is usually stored in the [`TargetInfo`] struct. If this -behavior is now stable, those checks should be removed. - -Cargo has several other packages in the [`crates/` directory]. If any of these -packages have changed, the version should be bumped **before the beta -release**. 
It is rare that these get updated. Bumping these as-needed helps -avoid churning incompatible version numbers. This process should be improved -in the future! - -[`crates/` directory]: https://github.com/rust-lang/cargo/tree/master/crates - -## Docs publishing - -Docs are automatically published during the Rust release process. The nightly -channel's docs appear at . Once -nightly is promoted to beta, those docs will appear at -. Once the stable release is made, it -will appear on (which is the "current" -stable) and the release-specific URL such as -. - -The code that builds the documentation is located in the [`doc` bootstrap -module]. - -[`doc` bootstrap module]: https://github.com/rust-lang/rust/blob/master/src/bootstrap/doc.rs - -## crates.io publishing - -Cargo's library is published to [crates.io] as part of the stable release -process. This is handled by the [Release team] as part of their process. There -is a [`publish.py` script] that in theory should help with this process. The -test and build tool crates aren't published. - -[`publish.py` script]: https://github.com/rust-lang/cargo/blob/master/publish.py - -## Beta backports - -If there is a regression or major problem detected during the beta phase, it -may be necessary to backport a fix to beta. The process is documented in the -[Beta Backporting] page. - -[Beta Backporting]: https://forge.rust-lang.org/release/beta-backporting.html - -## Stable backports - -In (hopefully!) very rare cases, a major regression or problem may be reported -after the stable release. Decisions about this are usually coordinated between -the [Release team] and the Cargo team. There is usually a high bar for making -a stable patch release, and the decision may be influenced by whether or not -there are other changes that need a new stable release. - -The process here is similar to the beta-backporting process. The -[rust-lang/cargo] branch is the same as beta (`rust-1.XX.0`). The -[rust-lang/rust] branch is called `stable`. - -[choochoo]: https://doc.rust-lang.org/book/appendix-07-nightly-rust.html -[rust-lang/rust]: https://github.com/rust-lang/rust/ -[rust-lang/cargo]: https://github.com/rust-lang/cargo/ -[CHANGELOG]: https://github.com/rust-lang/cargo/blob/master/CHANGELOG.md -[release process]: https://forge.rust-lang.org/release/process.html -[`TargetInfo`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/build_context/target_info.rs -[crates.io]: https://crates.io/ -[release team]: https://www.rust-lang.org/governance/teams/operations#release diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/unstable.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/unstable.md deleted file mode 100644 index 522b1176a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/unstable.md +++ /dev/null @@ -1,83 +0,0 @@ -# Unstable features - -Most new features should go through the unstable process. This means that the -feature will only be usable on the nightly channel, and requires a specific -opt-in by the user. Small changes can skip this process, but please consult -with the Cargo team first. - -## Unstable feature opt-in - -For features that require behavior changes or new syntax in `Cargo.toml`, then -it will need a `cargo-features` value placed at the top of `Cargo.toml` to -enable it. The process for doing adding a new feature is described in the -[`features` module]. 
Code that implements the feature will need to manually -check that the feature is enabled for the current manifest. - -For features that add new command-line flags, config options, or environment -variables, then the `-Z` flags will be needed to enable them. The [`features` -module] also describes how to add these. New flags should use the -`fail_if_stable_opt` method to check if the `-Z unstable-options` flag has -been passed. - -## Unstable documentation - -Every unstable feature should have a section added to the [unstable chapter] -describing how to use the feature. - -`-Z` CLI flags should be documented in the built-in help in the [`cli` -module]. - -[unstable chapter]: https://github.com/rust-lang/cargo/blob/master/src/doc/src/reference/unstable.md -[`cli` module]: https://github.com/rust-lang/cargo/blob/master/src/bin/cargo/cli.rs - -## Tracking issues - -Each unstable feature should get a [tracking issue]. These issues are -typically created when a PR is close to being merged, or soon after it is -merged. Use the [tracking issue template] when creating a tracking issue. - -Larger features should also get a new label in the issue tracker so that when -issues are filed, they can be easily tied together. - -[tracking issue]: https://github.com/rust-lang/cargo/labels/C-tracking-issue -[tracking issue template]: https://github.com/rust-lang/cargo/issues/new?labels=C-tracking-issue&template=tracking_issue.md - -## Stabilization - -After some period of time, typically measured in months, the feature can be -considered to be stabilized. The feature should not have any significant known -bugs or issues, and any design concerns should be resolved. - -The stabilization process depends on the kind of feature. For smaller -features, you can leave a comment on the tracking issue expressing interest in -stabilizing it. It can usually help to indicate that the feature has received -some real-world testing, and has exhibited some demand for broad use. - -For larger features that have not gone through the [RFC process], then an RFC -to call for stabilization might be warranted. This gives the community a final -chance to provide feedback about the proposed design. - -For a small feature, or one that has already gone through the RFC process, a -Cargo Team member may decide to call for a "final comment period" using -[rfcbot]. This is a public signal that a major change is being made, and gives -the Cargo Team members an opportunity to confirm or block the change. This -process can take a few days or weeks, or longer if a concern is raised. - -Once the stabilization has been approved, the person who called for -stabilization should prepare a PR to stabilize the feature. This PR should: - -* Flip the feature to stable in the [`features` module]. -* Remove any unstable checks that aren't automatically handled by the feature - system. -* Move the documentation from the [unstable chapter] into the appropriate - places in the Cargo book and man pages. -* Remove the `-Z` flags and help message if applicable. -* Update all tests to remove nightly checks. -* Tag the PR with [relnotes] label if it seems important enough to highlight - in the [Rust release notes]. 
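To make the `-Z` opt-in described earlier in this chapter concrete, here is a minimal sketch of what a user runs to enable a nightly-only CLI flag. `--out-dir` is used purely as an example of a flag gated behind `-Z unstable-options` (the `cargo build` man page later in this document lists it as such), and `cargo +nightly` assumes a rustup-managed toolchain:

```sh
# Nightly-only CLI flags are gated behind `-Z unstable-options`.
# `--out-dir` is one such flag; substitute whatever flag the feature defines.
cargo +nightly build -Z unstable-options --out-dir=out
```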
- -[`features` module]: https://github.com/rust-lang/cargo/blob/master/src/cargo/core/features.rs -[RFC process]: https://github.com/rust-lang/rfcs/ -[rfcbot]: https://github.com/rust-lang/rfcbot-rs -[Rust release notes]: https://github.com/rust-lang/rust/blob/master/RELEASES.md -[relnotes]: https://github.com/rust-lang/cargo/issues?q=label%3Arelnotes diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/working-on-cargo.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/working-on-cargo.md deleted file mode 100644 index b2e002bc4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/process/working-on-cargo.md +++ /dev/null @@ -1,145 +0,0 @@ -# Working on Cargo - -This chapter gives an overview of how to build Cargo, make a change, and -submit a Pull Request. - -1. [Check out the Cargo source.](#checkout-out-the-source) -2. [Building Cargo.](#building-cargo) -3. [Making a change.](#making-a-change) -4. [Writing and running tests.](../tests/index.md) -5. [Submitting a Pull Request.](#submitting-a-pull-request) -6. [The merging process.](#the-merging-process) - -## Checkout out the source - -We use the "fork and pull" model [described here][development-models], where -contributors push changes to their personal fork and [create pull requests] to -bring those changes into the source repository. Cargo uses [git] and [GitHub] -for all development. - -1. Fork the [`rust-lang/cargo`] repository on GitHub to your personal account - (see [GitHub docs][how-to-fork]). -2. Clone your fork to your local machine using `git clone` (see [GitHub - docs][how-to-clone]) -3. It is recommended to start a new branch for the change you want to make. - All Pull Requests are made against the master branch. - -## Building Cargo - -Cargo is built by...running `cargo`! There are a few prerequisites that you -need to have installed: - -* `rustc` and `cargo` need to be installed. Cargo is expected to build and - test with the current stable, beta, and nightly releases. It is your choice - which to use. Nightly is recommended, since some nightly-specific tests are - disabled when using the stable release. But using stable is fine if you - aren't working on those. -* A C compiler (typically gcc, clang, or MSVC). -* [git] -* Unix: - * pkg-config - * OpenSSL (`libssl-dev` on Ubuntu, `openssl-devel` on Fedora) -* macOS: - * OpenSSL ([homebrew] is recommended to install the `openssl` package) - -If you can successfully run `cargo build`, you should be good to go! - -[homebrew]: https://brew.sh/ - -## Running Cargo - -You can use `cargo run` to run cargo itself, or you can use the path directly -to the cargo binary, such as `target/debug/cargo`. - -If you are using [`rustup`], beware that running the binary directly can cause -issues with rustup overrides. Usually, when `cargo` is executed as part of -rustup, the toolchain becomes sticky (via an environment variable), and all -calls to `rustc` will use the same toolchain. But when `cargo` is not run via -rustup, the toolchain may change based on the directory. Since Cargo changes -the directory for each compilation, this can cause different calls to `rustc` -to use different versions. There are a few workarounds: - -* Don't use rustup overrides. -* Use `rustup run target/debug/cargo` to execute `cargo`. -* Set the `RUSTC` environment variable to a specific `rustc` executable (not - the rustup wrapper). -* Create a [custom toolchain]. 
This is a bit of a hack, but you can create a - directory in the rustup `toolchains` directory, and create symlinks for all - the files and directories in there to your toolchain of choice (such as - nightly), except for the `cargo` binary, which you can symlink to your - `target/debug/cargo` binary in your project directory. - -*Normally*, all development is done by running Cargo's test suite, so running -it directly usually isn't required. But it can be useful for testing Cargo on -more complex projects. - -[`rustup`]: https://rust-lang.github.io/rustup/ -[custom toolchain]: https://rust-lang.github.io/rustup/concepts/toolchains.html#custom-toolchains - -## Making a change - -Some guidelines on working on a change: - -* All code changes are expected to comply with the formatting suggested by - `rustfmt`. You can use `rustup component add rustfmt` to install `rustfmt` - and use `cargo fmt` to automatically format your code. -* Include tests that cover all non-trivial code. See the [Testing chapter] for - more about writing and running tests. -* All code should be warning-free. This is checked during tests. - -## Submitting a Pull Request - -After you have committed your work, and pushed it to GitHub, you can -open a Pull Request - -* Push your commits to GitHub and create a pull request against Cargo's - `master` branch. -* Include a clear description of what the change is and why it is being made. -* Use [GitHub's keywords] in the description to automatically link to an issue - if the PR resolves the issue. For example `Closes #1234` will link issue - #1234 to the PR. When the PR is merged, GitHub will automatically close the - issue. - -The [rust-highfive] bot will automatically assign a reviewer for the PR. It -may take at least a few days for someone to respond. If you don't get a -response in over a week, feel free to ping the assigned reviewer. - -When your PR is submitted, GitHub automatically runs all tests. The GitHub -interface will show a green checkmark if it passes, or a red X if it fails. -There are links to the logs on the PR page to diagnose any issues. The tests -typically finish in under 30 minutes. - -The reviewer might point out changes deemed necessary. Large or tricky changes -may require several passes of review and changes. - -## The merging process - -After a reviewer has approved your PR, they will issue a command to the [bors] -bot (also known as "Homu", the software that powers [`@bors`]). Bors will -create a temporary branch with your PR, and run all tests. Only if all tests -pass will it merge the PR to master. If it fails, the bot will leave a comment -on the PR. This system ensures that the master branch is always in a good -state, and that merges are processed one at a time. The [Homu queue -dashboard][homu-cargo] shows the current merge queue. Cargo's queue is rarely -busy, but a busy project like the [rust repo][homu-rust] is constantly full. - -Assuming everything works, congratulations! It may take at least a week for -the changes to arrive on the nightly channel. See the [release chapter] for -more information on how Cargo releases are made. 
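As a sketch of the rustup workarounds listed in the Running Cargo section above (assuming a rustup-managed nightly toolchain and a debug build of Cargo at `target/debug/cargo`):

```sh
# Run the locally built cargo through rustup so the toolchain stays pinned
# for every rustc invocation, regardless of per-directory overrides.
rustup run nightly target/debug/cargo check

# Alternatively, pin rustc explicitly via the RUSTC environment variable
# (pointing at a real rustc binary, not the rustup wrapper).
RUSTC="$(rustup which --toolchain nightly rustc)" target/debug/cargo build
```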
- - -[development-models]: https://help.github.com/articles/about-collaborative-development-models/ -[create pull requests]: https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request -[how-to-fork]: https://docs.github.com/en/github/getting-started-with-github/fork-a-repo -[`rust-lang/cargo`]: https://github.com/rust-lang/cargo/ -[git]: https://git-scm.com/ -[GitHub]: https://github.com/ -[how-to-clone]: https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/cloning-a-repository -[Testing chapter]: ../tests/index.md -[GitHub's keywords]: https://docs.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue -[rust-highfive]: https://github.com/rust-highfive -[bors]: https://buildbot2.rust-lang.org/homu/ -[`@bors`]: https://github.com/bors -[homu-cargo]: https://buildbot2.rust-lang.org/homu/queue/cargo -[homu-rust]: https://buildbot2.rust-lang.org/homu/queue/rust -[release chapter]: release.md diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/index.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/index.md deleted file mode 100644 index dac047684..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/index.md +++ /dev/null @@ -1,20 +0,0 @@ -# Tests - -Cargo has an extensive test suite. Most of it is implemented as integration -tests in the [`testsuite`] directory. There are several other tests: - -* Unit tests are scattered throughout. -* The dependency resolver has its own set of tests in the [`resolver-tests`] - directory. -* All of the packages in the [`crates`] directory have their own set of tests. -* The [`build-std`] test is for the [build-std feature]. It is separate since - it has some special requirements. -* Documentation has a variety of tests, such as link validation, and the - [SemVer chapter validity checks]. - -[`testsuite`]: https://github.com/rust-lang/cargo/tree/master/tests/testsuite/ -[`resolver-tests`]: https://github.com/rust-lang/cargo/tree/master/crates/resolver-tests -[`crates`]: https://github.com/rust-lang/cargo/tree/master/crates -[`build-std`]: https://github.com/rust-lang/cargo/blob/master/tests/build-std/main.rs -[build-std feature]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std -[SemVer chapter validity checks]: https://github.com/rust-lang/cargo/tree/master/src/doc/semver-check diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/profiling.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/profiling.md deleted file mode 100644 index 1cc980ca3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/profiling.md +++ /dev/null @@ -1,40 +0,0 @@ -# Benchmarking and Profiling - -## Internal profiler - -Cargo has a basic, hierarchical profiler built-in. The environment variable -`CARGO_PROFILE` can be set to an integer which specifies how deep in the -profile stack to print results for. - -```sh -# Output first three levels of profiling info -CARGO_PROFILE=3 cargo generate-lockfile -``` - -## Benchmarking - -### Benchsuite - -Head over to the [`benches` -directory](https://github.com/rust-lang/cargo/tree/master/benches) for more -information about the benchmarking suite. - -### Informal benchmarking - -The overhead for starting a build should be kept as low as possible -(preferably, well under 0.5 seconds on most projects and systems). 
Currently, -the primary parts that affect this are: - -* Running the resolver. -* Querying the index. -* Checking git dependencies. -* Scanning the local project. -* Building the unit dependency graph. - -One way to test this is to use [hyperfine]. This is a tool that can be used to -measure the difference between different commands and settings. Usually this -is done by measuring the time it takes for `cargo build` to finish in a large -project where the build is fresh (no actual compilation is performed). Just -run `cargo build` once before using hyperfine. - -[hyperfine]: https://github.com/sharkdp/hyperfine diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/running.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/running.md deleted file mode 100644 index b2c4659b4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/running.md +++ /dev/null @@ -1,41 +0,0 @@ -# Running Tests - -Using `cargo test` is usually sufficient for running the full test suite. This -can take a few minutes, so you may want to use more targeted flags to pick the -specific test you want to run, such as `cargo test --test testsuite --- check::check_success`. - -## Running nightly tests - -Some tests only run on the nightly toolchain, and will be ignored on other -channels. It is recommended that you run tests with both nightly and stable to -ensure everything is working as expected. - -Some of the nightly tests require the `rustc-dev` and `llvm-tools-preview` -rustup components installed. These components include the compiler as a -library. This may already be installed with your nightly toolchain, but if it -isn't, run `rustup component add rustc-dev llvm-tools-preview ---toolchain=nightly`. - -## Running cross tests - -Some tests exercise cross compiling to a different target. This will require -you to install the appropriate target. This typically is the 32-bit target of -your host platform. For example, if your host is a 64-bit -`x86_64-unknown-linux-gnu`, then you should install the 32-bit target with -`rustup target add i686-unknown-linux-gnu`. If you don't have the alternate -target installed, there should be an error message telling you what to do. You -may also need to install additional tools for the target. For example, on Ubuntu -you should install the `gcc-multilib` package. - -If you can't install an alternate target, you can set the -`CFG_DISABLE_CROSS_TESTS=1` environment variable to disable these tests. The -Windows cross tests only support the MSVC toolchain. - -## Running build-std tests - -The `build-std` tests are disabled by default, but you can run them by setting -the `CARGO_RUN_BUILD_STD_TESTS=1` environment variable and running `cargo test ---test build-std`. This requires the nightly channel, and also requires the -`rust-src` component installed with `rustup component add rust-src ---toolchain=nightly`. diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/writing.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/writing.md deleted file mode 100644 index 3d9e1b267..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/contrib/src/tests/writing.md +++ /dev/null @@ -1,161 +0,0 @@ -# Writing Tests - -The following focuses on writing an integration test. However, writing unit -tests is also encouraged! 
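Before getting into writing tests, the one-time setup implied by the Running Tests sections above can be collected into a short sketch (the exact target and components depend on your host platform; a 64-bit x86_64 Linux host is assumed here):

```sh
# Components needed by the nightly-only tests (compiler-as-a-library).
rustup component add rustc-dev llvm-tools-preview --toolchain=nightly

# 32-bit target for the cross tests on a 64-bit x86_64 Linux host.
rustup target add i686-unknown-linux-gnu

# rust-src is required by the opt-in build-std tests.
rustup component add rust-src --toolchain=nightly

# Run the full suite, then the build-std tests (nightly only, off by default).
cargo +nightly test
CARGO_RUN_BUILD_STD_TESTS=1 cargo +nightly test --test build-std
```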
- -## Testsuite - -Cargo has a wide variety of integration tests that execute the `cargo` binary -and verify its behavior, located in the [`testsuite`] directory. The -[`support`] crate contains many helpers to make this process easy. - -These tests typically work by creating a temporary "project" with a -`Cargo.toml` file, executing the `cargo` binary process, and checking the -stdout and stderr output against the expected output. - -### `cargo_test` attribute - -Cargo's tests use the `#[cargo_test]` attribute instead of `#[test]`. This -attribute injects some code which does some setup before starting the test, -creating the little "sandbox" described below. - -### Basic test structure - -The general form of a test involves creating a "project", running `cargo`, and -checking the result. Projects are created with the [`ProjectBuilder`] where -you specify some files to create. The general form looks like this: - -```rust,ignore -let p = project() - .file("src/main.rs", r#"fn main() { println!("hi!"); }"#) - .build(); -``` - -The project creates a mini sandbox under the "cargo integration test" -directory with each test getting a separate directory such as -`/path/to/cargo/target/cit/t123/`. Each project appears as a separate -directory. There is also an empty `home` directory created that will be used -as a home directory instead of your normal home directory. - -If you do not specify a `Cargo.toml` manifest using `file()`, one is -automatically created with a project name of `foo` using `basic_manifest()`. - -To run Cargo, call the `cargo` method and make assertions on the execution: - -```rust,ignore -p.cargo("run --bin foo") - .with_stderr( - "\ -[COMPILING] foo [..] -[FINISHED] [..] -[RUNNING] `target/debug/foo` -", - ) - .with_stdout("hi!") - .run(); -``` - -This uses the [`Execs`] struct to build up a command to execute, along with -the expected output. - -See [`support::compare`] for an explanation of the string pattern matching. -Patterns are used to make it easier to match against the expected output. - -Browse the `pub` functions and modules in the [`support`] crate for a variety -of other helpful utilities. - -### Testing Nightly Features - -If you are testing a Cargo feature that only works on "nightly" Cargo, then -you need to call `masquerade_as_nightly_cargo` on the process builder like -this: - -```rust,ignore -p.cargo("build").masquerade_as_nightly_cargo() -``` - -If you are testing a feature that only works on *nightly rustc* (such as -benchmarks), then you should exit the test if it is not running with nightly -rust, like this: - -```rust,ignore -if !is_nightly() { - // Add a comment here explaining why this is necessary. - return; -} -``` - -### Platform-specific Notes - -When checking output, use `/` for paths even on Windows: the actual output -of `\` on Windows will be replaced with `/`. - -Be careful when executing binaries on Windows. You should not rename, delete, -or overwrite a binary immediately after running it. Under some conditions -Windows will fail with errors like "directory not empty" or "failed to remove" -or "access is denied". - -### Specifying Dependencies - -You should not write any tests that use the network such as contacting -crates.io. Typically, simple path dependencies are the easiest way to add a -dependency. 
Example: - -```rust,ignore -let p = project() - .file("Cargo.toml", r#" - [package] - name = "foo" - version = "1.0.0" - - [dependencies] - bar = {path = "bar"} - "#) - .file("src/lib.rs", "extern crate bar;") - .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) - .file("bar/src/lib.rs", "") - .build(); -``` - -If you need to test with registry dependencies, see -[`support::registry::Package`] for creating packages you can depend on. - -If you need to test git dependencies, see [`support::git`] to create a git -dependency. - -## Debugging tests - -In some cases, you may need to dig into a test that is not working as you -expect, or you just generally want to experiment within the sandbox -environment. The general process is: - -1. Build the sandbox for the test you want to investigate. For example: - - `cargo test --test testsuite -- features2::inactivate_targets`. -2. In another terminal, head into the sandbox directory to inspect the files and run `cargo` directly. - 1. The sandbox directories start with `t0` for the first test. - - `cd target/tmp/cit/t0` - 2. Set up the environment so that the sandbox configuration takes effect: - - `export CARGO_HOME=$(pwd)/home/.cargo` - 3. Most tests create a `foo` project, so head into that: - - `cd foo` -3. Run whatever cargo command you want. See [Running Cargo] for more details - on running the correct `cargo` process. Some examples: - - * `/path/to/my/cargo/target/debug/cargo check` - * Using a debugger like `lldb` or `gdb`: - 1. `lldb /path/to/my/cargo/target/debug/cargo` - 2. Set a breakpoint, for example: `b generate_targets` - 3. Run with arguments: `r check` - -[`testsuite`]: https://github.com/rust-lang/cargo/tree/master/tests/testsuite/ -[`ProjectBuilder`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/crates/cargo-test-support/src/lib.rs#L225-L231 -[`Execs`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/crates/cargo-test-support/src/lib.rs#L558-L579 -[`support`]: https://github.com/rust-lang/cargo/blob/master/crates/cargo-test-support/src/lib.rs -[`support::compare`]: https://github.com/rust-lang/cargo/blob/master/crates/cargo-test-support/src/compare.rs -[`support::registry::Package`]: https://github.com/rust-lang/cargo/blob/e4b65bdc80f2a293447f2f6a808fa7c84bf9a357/crates/cargo-test-support/src/registry.rs#L73-L149 -[`support::git`]: https://github.com/rust-lang/cargo/blob/master/crates/cargo-test-support/src/git.rs -[Running Cargo]: ../process/working-on-cargo.md#running-cargo diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-bench.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-bench.md deleted file mode 100644 index ded8e2b36..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-bench.md +++ /dev/null @@ -1,157 +0,0 @@ -# cargo-bench(1) -{{*set actionverb="Benchmark"}} -{{*set nouns="benchmarks"}} - -## NAME - -cargo-bench - Execute benchmarks of a package - -## SYNOPSIS - -`cargo bench` [_options_] [_benchname_] [`--` _bench-options_] - -## DESCRIPTION - -Compile and execute benchmarks. - -The benchmark filtering argument _benchname_ and all the arguments following -the two dashes (`--`) are passed to the benchmark binaries and thus to -_libtest_ (rustc's built in unit-test and micro-benchmarking framework). If -you are passing arguments to both Cargo and the binary, the ones after `--` go -to the binary, the ones before go to Cargo. 
For details about libtest's -arguments see the output of `cargo bench -- --help` and check out the rustc -book's chapter on how tests work at -. - -As an example, this will run only the benchmark named `foo` (and skip other -similarly named benchmarks like `foobar`): - - cargo bench -- foo --exact - -Benchmarks are built with the `--test` option to `rustc` which creates an -executable with a `main` function that automatically runs all functions -annotated with the `#[bench]` attribute. Cargo passes the `--bench` flag to -the test harness to tell it to run only benchmarks. - -The libtest harness may be disabled by setting `harness = false` in the target -manifest settings, in which case your code will need to provide its own `main` -function to handle running benchmarks. - -> **Note**: The -> [`#[bench]` attribute](https://doc.rust-lang.org/nightly/unstable-book/library-features/test.html) -> is currently unstable and only available on the -> [nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html). -> There are some packages available on -> [crates.io](https://crates.io/keywords/benchmark) that may help with -> running benchmarks on the stable channel, such as -> [Criterion](https://crates.io/crates/criterion). - -By default, `cargo bench` uses the [`bench` profile], which enables -optimizations and disables debugging information. If you need to debug a -benchmark, you can use the `--profile=dev` command-line option to switch to -the dev profile. You can then run the debug-enabled benchmark within a -debugger. - -[`bench` profile]: ../reference/profiles.html#bench - -## OPTIONS - -### Benchmark Options - -{{> options-test }} - -{{> section-package-selection }} - -### Target Selection - -When no target selection options are given, `cargo bench` will build the -following targets of the selected packages: - -- lib โ€” used to link with binaries and benchmarks -- bins (only if benchmark targets are built and required features are - available) -- lib as a benchmark -- bins as benchmarks -- benchmark targets - -The default behavior can be changed by setting the `bench` flag for the target -in the manifest settings. Setting examples to `bench = true` will build and -run the example as a benchmark. Setting targets to `bench = false` will stop -them from being benchmarked by default. Target selection options that take a -target by name ignore the `bench` flag and will always benchmark the given -target. - -{{> options-targets }} - -{{> section-features }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-profile }} - -{{> options-ignore-rust-version }} - -{{/options}} - -### Output Options - -{{#options}} -{{> options-target-dir }} -{{/options}} - -### Display Options - -By default the Rust test harness hides output from benchmark execution to keep -results readable. Benchmark output can be recovered (e.g., for debugging) by -passing `--nocapture` to the benchmark binaries: - - cargo bench -- --nocapture - -{{#options}} - -{{> options-display }} - -{{> options-message-format }} - -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} - -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -### Miscellaneous Options - -The `--jobs` argument affects the building of the benchmark executable but -does not affect how many threads are used when running the benchmarks. The -Rust test harness runs benchmarks serially in a single thread. 
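Pulling the pieces of this page together (a sketch, with `bench_name` and the `foo` filter standing in for your own targets): arguments before `--` go to Cargo, arguments after it go to the libtest harness, and `--jobs` only parallelizes the build step:

```sh
# `--bench bench_name` and `--jobs 8` are consumed by Cargo;
# `foo --exact --nocapture` is passed to the benchmark binary (libtest),
# which still runs the benchmarks serially in a single thread.
cargo bench --bench bench_name --jobs 8 -- foo --exact --nocapture
```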
- -{{#options}} -{{> options-jobs }} -{{/options}} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Build and execute all the benchmarks of the current package: - - cargo bench - -2. Run only a specific benchmark within a specific benchmark target: - - cargo bench --bench bench_name -- modname::some_benchmark - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-test" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-build.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-build.md deleted file mode 100644 index 1a06b9983..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-build.md +++ /dev/null @@ -1,110 +0,0 @@ -# cargo-build(1) -{{*set actionverb="Build"}} - -## NAME - -cargo-build - Compile the current package - -## SYNOPSIS - -`cargo build` [_options_] - -## DESCRIPTION - -Compile local packages and all of their dependencies. - -## OPTIONS - -{{> section-package-selection }} - -### Target Selection - -When no target selection options are given, `cargo build` will build all -binary and library targets of the selected packages. Binaries are skipped if -they have `required-features` that are missing. - -{{> options-targets }} - -{{> section-features }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-release }} - -{{> options-profile }} - -{{> options-ignore-rust-version }} - -{{/options}} - -### Output Options - -{{#options}} -{{> options-target-dir }} - -{{#option "`--out-dir` _directory_" }} -Copy final artifacts to this directory. - -This option is unstable and available only on the -[nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html) -and requires the `-Z unstable-options` flag to enable. -See for more information. -{{/option}} - -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} - -{{> options-message-format }} - -{{#option "`--build-plan`" }} -Outputs a series of JSON messages to stdout that indicate the commands to run -the build. - -This option is unstable and available only on the -[nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html) -and requires the `-Z unstable-options` flag to enable. -See for more information. -{{/option}} -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} - -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -### Miscellaneous Options - -{{#options}} -{{> options-jobs }} -{{> options-future-incompat }} -{{/options}} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Build the local package and all of its dependencies: - - cargo build - -2. Build with optimizations: - - cargo build --release - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-rustc" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-check.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-check.md deleted file mode 100644 index e0020f7c4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-check.md +++ /dev/null @@ -1,95 +0,0 @@ -# cargo-check(1) -{{*set actionverb="Check"}} - -## NAME - -cargo-check - Check the current package - -## SYNOPSIS - -`cargo check` [_options_] - -## DESCRIPTION - -Check a local package and all of its dependencies for errors. This will -essentially compile the packages without performing the final step of code -generation, which is faster than running `cargo build`. 
The compiler will save -metadata files to disk so that future runs will reuse them if the source has -not been modified. Some diagnostics and errors are only emitted during code -generation, so they inherently won't be reported with `cargo check`. - -## OPTIONS - -{{> section-package-selection }} - -### Target Selection - -When no target selection options are given, `cargo check` will check all -binary and library targets of the selected packages. Binaries are skipped if -they have `required-features` that are missing. - -{{> options-targets }} - -{{> section-features }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-release }} - -{{> options-profile-legacy-check }} - -{{> options-ignore-rust-version }} - -{{/options}} - -### Output Options - -{{#options}} -{{> options-target-dir }} -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} - -{{> options-message-format }} -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} - -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -### Miscellaneous Options - -{{#options}} -{{> options-jobs }} -{{> options-future-incompat }} -{{/options}} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Check the local package for errors: - - cargo check - -2. Check all targets, including unit tests: - - cargo check --all-targets --profile=test - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-build" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-clean.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-clean.md deleted file mode 100644 index aa6315632..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-clean.md +++ /dev/null @@ -1,87 +0,0 @@ -# cargo-clean(1) -{{*set actionverb="Clean"}} - -## NAME - -cargo-clean - Remove generated artifacts - -## SYNOPSIS - -`cargo clean` [_options_] - -## DESCRIPTION - -Remove artifacts from the target directory that Cargo has generated in the -past. - -With no options, `cargo clean` will delete the entire target directory. - -## OPTIONS - -### Package Selection - -When no packages are selected, all packages and all dependencies in the -workspace are cleaned. - -{{#options}} -{{#option "`-p` _spec_..." "`--package` _spec_..." }} -Clean only the specified packages. This flag may be specified -multiple times. See {{man "cargo-pkgid" 1}} for the SPEC format. -{{/option}} -{{/options}} - -### Clean Options - -{{#options}} - -{{#option "`--doc`" }} -This option will cause `cargo clean` to remove only the `doc` directory in -the target directory. -{{/option}} - -{{#option "`--release`" }} -Remove all artifacts in the `release` directory. -{{/option}} - -{{#option "`--profile` _name_" }} -Remove all artifacts in the directory with the given profile name. -{{/option}} - -{{> options-target-dir }} - -{{> options-target-triple }} - -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} - -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Remove the entire target directory: - - cargo clean - -2. 
Remove only the release artifacts: - - cargo clean --release - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-build" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-doc.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-doc.md deleted file mode 100644 index 551adfcf9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-doc.md +++ /dev/null @@ -1,125 +0,0 @@ -# cargo-doc(1) -{{*set actionverb="Document"}} - -## NAME - -cargo-doc - Build a package's documentation - -## SYNOPSIS - -`cargo doc` [_options_] - -## DESCRIPTION - -Build the documentation for the local package and all dependencies. The output -is placed in `target/doc` in rustdoc's usual format. - -## OPTIONS - -### Documentation Options - -{{#options}} - -{{#option "`--open`" }} -Open the docs in a browser after building them. This will use your default -browser unless you define another one in the `BROWSER` environment variable -or use the [`doc.browser`](../reference/config.html#docbrowser) configuration -option. -{{/option}} - -{{#option "`--no-deps`" }} -Do not build documentation for dependencies. -{{/option}} - -{{#option "`--document-private-items`" }} -Include non-public items in the documentation. This will be enabled by default if documenting a binary target. -{{/option}} - -{{/options}} - -{{> section-package-selection }} - -### Target Selection - -When no target selection options are given, `cargo doc` will document all -binary and library targets of the selected package. The binary will be skipped -if its name is the same as the lib target. Binaries are skipped if they have -`required-features` that are missing. - -The default behavior can be changed by setting `doc = false` for the target in -the manifest settings. Using target selection options will ignore the `doc` -flag and will always document the given target. - -{{#options}} -{{> options-targets-lib-bin }} - -{{#option "`--example` _name_..." }} -{{actionverb}} the specified example. This flag may be specified multiple times -and supports common Unix glob patterns. -{{/option}} - -{{#option "`--examples`" }} -{{actionverb}} all example targets. -{{/option}} - -{{/options}} - -{{> section-features }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-release }} - -{{> options-profile }} - -{{> options-ignore-rust-version }} - -{{/options}} - -### Output Options - -{{#options}} -{{> options-target-dir }} -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} - -{{> options-message-format }} -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} - -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -### Miscellaneous Options - -{{#options}} -{{> options-jobs }} -{{/options}} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Build the local package documentation and its dependencies and output to - `target/doc`. 
- - cargo doc - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-rustdoc" 1}}, {{man "rustdoc" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-fetch.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-fetch.md deleted file mode 100644 index 2cca5f797..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-fetch.md +++ /dev/null @@ -1,63 +0,0 @@ -# cargo-fetch(1) -{{*set actionverb="Fetch"}} - -## NAME - -cargo-fetch - Fetch dependencies of a package from the network - -## SYNOPSIS - -`cargo fetch` [_options_] - -## DESCRIPTION - -If a `Cargo.lock` file is available, this command will ensure that all of the -git dependencies and/or registry dependencies are downloaded and locally -available. Subsequent Cargo commands never touch the network after a `cargo -fetch` unless the lock file changes. - -If the lock file is not available, then this command will generate the lock -file before fetching the dependencies. - -If `--target` is not specified, then all target dependencies are fetched. - -See also the [cargo-prefetch](https://crates.io/crates/cargo-prefetch) -plugin which adds a command to download popular crates. This may be useful if -you plan to use Cargo without a network with the `--offline` flag. - -## OPTIONS - -### Fetch options - -{{#options}} -{{> options-target-triple }} -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} - -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Fetch all dependencies: - - cargo fetch - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-update" 1}}, {{man "cargo-generate-lockfile" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-fix.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-fix.md deleted file mode 100644 index 2d96efd42..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-fix.md +++ /dev/null @@ -1,178 +0,0 @@ -# cargo-fix(1) -{{*set actionverb="Fix"}} - -## NAME - -cargo-fix - Automatically fix lint warnings reported by rustc - -## SYNOPSIS - -`cargo fix` [_options_] - -## DESCRIPTION - -This Cargo subcommand will automatically take rustc's suggestions from -diagnostics like warnings and apply them to your source code. This is intended -to help automate tasks that rustc itself already knows how to tell you to fix! - -Executing `cargo fix` will under the hood execute {{man "cargo-check" 1}}. Any warnings -applicable to your crate will be automatically fixed (if possible) and all -remaining warnings will be displayed when the check process is finished. For -example if you'd like to apply all fixes to the current package, you can run: - - cargo fix - -which behaves the same as `cargo check --all-targets`. - -`cargo fix` is only capable of fixing code that is normally compiled with -`cargo check`. If code is conditionally enabled with optional features, you -will need to enable those features for that code to be analyzed: - - cargo fix --features foo - -Similarly, other `cfg` expressions like platform-specific code will need to -pass `--target` to fix code for the given target. - - cargo fix --target x86_64-pc-windows-gnu - -If you encounter any problems with `cargo fix` or otherwise have any questions -or feature requests please don't hesitate to file an issue at -. 
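A brief sketch tying the two caveats above together (the feature flag and target triple are just examples): `cargo fix` only rewrites code it actually compiles, so feature-gated and platform-specific code needs its own invocations:

```sh
# Cover optional features and platform-specific cfg blocks explicitly,
# since only compiled code can receive fixes.
cargo fix --all-features
cargo fix --target x86_64-pc-windows-gnu
```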
- -### Edition migration - -The `cargo fix` subcommand can also be used to migrate a package from one -[edition] to the next. The general procedure is: - -1. Run `cargo fix --edition`. Consider also using the `--all-features` flag if - your project has multiple features. You may also want to run `cargo fix - --edition` multiple times with different `--target` flags if your project - has platform-specific code gated by `cfg` attributes. -2. Modify `Cargo.toml` to set the [edition field] to the new edition. -3. Run your project tests to verify that everything still works. If new - warnings are issued, you may want to consider running `cargo fix` again - (without the `--edition` flag) to apply any suggestions given by the - compiler. - -And hopefully that's it! Just keep in mind of the caveats mentioned above that -`cargo fix` cannot update code for inactive features or `cfg` expressions. -Also, in some rare cases the compiler is unable to automatically migrate all -code to the new edition, and this may require manual changes after building -with the new edition. - -[edition]: https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html -[edition field]: ../reference/manifest.html#the-edition-field - -## OPTIONS - -### Fix options - -{{#options}} - -{{#option "`--broken-code`" }} -Fix code even if it already has compiler errors. This is useful if `cargo fix` -fails to apply the changes. It will apply the changes and leave the broken -code in the working directory for you to inspect and manually fix. -{{/option}} - -{{#option "`--edition`" }} -Apply changes that will update the code to the next edition. This will not -update the edition in the `Cargo.toml` manifest, which must be updated -manually after `cargo fix --edition` has finished. -{{/option}} - -{{#option "`--edition-idioms`" }} -Apply suggestions that will update code to the preferred style for the current -edition. -{{/option}} - -{{#option "`--allow-no-vcs`" }} -Fix code even if a VCS was not detected. -{{/option}} - -{{#option "`--allow-dirty`" }} -Fix code even if the working directory has changes. -{{/option}} - -{{#option "`--allow-staged`" }} -Fix code even if the working directory has staged changes. -{{/option}} - -{{/options}} - -{{> section-package-selection }} - -### Target Selection - -When no target selection options are given, `cargo fix` will fix all targets -(`--all-targets` implied). Binaries are skipped if they have -`required-features` that are missing. - -{{> options-targets }} - -{{> section-features }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-release }} - -{{> options-profile-legacy-check }} - -{{> options-ignore-rust-version }} - -{{/options}} - -### Output Options - -{{#options}} -{{> options-target-dir }} -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} - -{{> options-message-format }} -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} - -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -### Miscellaneous Options - -{{#options}} -{{> options-jobs }} -{{/options}} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Apply compiler suggestions to the local package: - - cargo fix - -2. Update a package to prepare it for the next edition: - - cargo fix --edition - -3. 
Apply suggested idioms for the current edition: - - cargo fix --edition-idioms - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-check" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-generate-lockfile.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-generate-lockfile.md deleted file mode 100644 index a2b963cef..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-generate-lockfile.md +++ /dev/null @@ -1,49 +0,0 @@ -# cargo-generate-lockfile(1) - -## NAME - -cargo-generate-lockfile - Generate the lockfile for a package - -## SYNOPSIS - -`cargo generate-lockfile` [_options_] - -## DESCRIPTION - -This command will create the `Cargo.lock` lockfile for the current package or -workspace. If the lockfile already exists, it will be rebuilt with the latest -available version of every package. - -See also {{man "cargo-update" 1}} which is also capable of creating a `Cargo.lock` -lockfile and has more options for controlling update behavior. - -## OPTIONS - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} - -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Create or update the lockfile for the current package or workspace: - - cargo generate-lockfile - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-update" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-help.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-help.md deleted file mode 100644 index edd8bc0cb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-help.md +++ /dev/null @@ -1,26 +0,0 @@ -# cargo-help(1) - -## NAME - -cargo-help - Get help for a Cargo command - -## SYNOPSIS - -`cargo help` [_subcommand_] - -## DESCRIPTION - -Prints a help message for the given command. - -## EXAMPLES - -1. Get help for a command: - - cargo help build - -2. Help is also available with the `--help` flag: - - cargo build --help - -## SEE ALSO -{{man "cargo" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-init.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-init.md deleted file mode 100644 index bdb5a8270..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-init.md +++ /dev/null @@ -1,51 +0,0 @@ -# cargo-init(1) - -## NAME - -cargo-init - Create a new Cargo package in an existing directory - -## SYNOPSIS - -`cargo init` [_options_] [_path_] - -## DESCRIPTION - -This command will create a new Cargo manifest in the current directory. Give a -path as an argument to create in the given directory. - -If there are typically-named Rust source files already in the directory, those -will be used. If not, then a sample `src/main.rs` file will be created, or -`src/lib.rs` if `--lib` is passed. - -If the directory is not already in a VCS repository, then a new repository -is created (see `--vcs` below). - -See {{man "cargo-new" 1}} for a similar command which will create a new package in -a new directory. - -## OPTIONS - -### Init Options - -{{> options-new }} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. 
Create a binary Cargo package in the current directory: - - cargo init - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-new" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-install.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-install.md deleted file mode 100644 index 28d7c6ab1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-install.md +++ /dev/null @@ -1,214 +0,0 @@ -# cargo-install(1) -{{*set actionverb="Install"}} -{{*set temp-target-dir=true}} - -## NAME - -cargo-install - Build and install a Rust binary - -## SYNOPSIS - -`cargo install` [_options_] _crate_...\ -`cargo install` [_options_] `--path` _path_\ -`cargo install` [_options_] `--git` _url_ [_crate_...]\ -`cargo install` [_options_] `--list` - -## DESCRIPTION - -This command manages Cargo's local set of installed binary crates. Only -packages which have executable `[[bin]]` or `[[example]]` targets can be -installed, and all executables are installed into the installation root's -`bin` folder. - -{{> description-install-root }} - -There are multiple sources from which a crate can be installed. The default -location is crates.io but the `--git`, `--path`, and `--registry` flags can -change this source. If the source contains more than one package (such as -crates.io or a git repository with multiple crates) the _crate_ argument is -required to indicate which crate should be installed. - -Crates from crates.io can optionally specify the version they wish to install -via the `--version` flags, and similarly packages from git repositories can -optionally specify the branch, tag, or revision that should be installed. If a -crate has multiple binaries, the `--bin` argument can selectively install only -one of them, and if you'd rather install examples the `--example` argument can -be used as well. - -If the package is already installed, Cargo will reinstall it if the installed -version does not appear to be up-to-date. If any of the following values -change, then Cargo will reinstall the package: - -- The package version and source. -- The set of binary names installed. -- The chosen features. -- The profile (`--profile`). -- The target (`--target`). - -Installing with `--path` will always build and install, unless there are -conflicting binaries from another package. The `--force` flag may be used to -force Cargo to always reinstall the package. - -If the source is crates.io or `--git` then by default the crate will be built -in a temporary target directory. To avoid this, the target directory can be -specified by setting the `CARGO_TARGET_DIR` environment variable to a relative -path. In particular, this can be useful for caching build artifacts on -continuous integration systems. - -By default, the `Cargo.lock` file that is included with the package will be -ignored. This means that Cargo will recompute which versions of dependencies -to use, possibly using newer versions that have been released since the -package was published. The `--locked` flag can be used to force Cargo to use -the packaged `Cargo.lock` file if it is available. This may be useful for -ensuring reproducible builds, to use the exact same set of dependencies that -were available when the package was published. It may also be useful if a -newer version of a dependency is published that no longer builds on your -system, or has other problems. The downside to using `--locked` is that you -will not receive any fixes or updates to any dependency. 
Note that Cargo did -not start publishing `Cargo.lock` files until version 1.37, which means -packages published with prior versions will not have a `Cargo.lock` file -available. - -## OPTIONS - -### Install Options - -{{#options}} - -{{#option "`--vers` _version_" "`--version` _version_" }} -Specify a version to install. This may be a [version -requirement](../reference/specifying-dependencies.md), like `~1.2`, to have Cargo -select the newest version from the given requirement. If the version does not -have a requirement operator (such as `^` or `~`), then it must be in the form -_MAJOR.MINOR.PATCH_, and will install exactly that version; it is *not* -treated as a caret requirement like Cargo dependencies are. -{{/option}} - -{{#option "`--git` _url_" }} -Git URL to install the specified crate from. -{{/option}} - -{{#option "`--branch` _branch_" }} -Branch to use when installing from git. -{{/option}} - -{{#option "`--tag` _tag_" }} -Tag to use when installing from git. -{{/option}} - -{{#option "`--rev` _sha_" }} -Specific commit to use when installing from git. -{{/option}} - -{{#option "`--path` _path_" }} -Filesystem path to local crate to install. -{{/option}} - -{{#option "`--list`" }} -List all installed packages and their versions. -{{/option}} - -{{#option "`-f`" "`--force`" }} -Force overwriting existing crates or binaries. This can be used if a package -has installed a binary with the same name as another package. This is also -useful if something has changed on the system that you want to rebuild with, -such as a newer version of `rustc`. -{{/option}} - -{{#option "`--no-track`" }} -By default, Cargo keeps track of the installed packages with a metadata file -stored in the installation root directory. This flag tells Cargo not to use or -create that file. With this flag, Cargo will refuse to overwrite any existing -files unless the `--force` flag is used. This also disables Cargo's ability to -protect against multiple concurrent invocations of Cargo installing at the -same time. -{{/option}} - -{{#option "`--bin` _name_..." }} -Install only the specified binary. -{{/option}} - -{{#option "`--bins`" }} -Install all binaries. -{{/option}} - -{{#option "`--example` _name_..." }} -Install only the specified example. -{{/option}} - -{{#option "`--examples`" }} -Install all examples. -{{/option}} - -{{#option "`--root` _dir_" }} -Directory to install packages into. -{{/option}} - -{{> options-registry }} - -{{> options-index }} - -{{/options}} - -{{> section-features }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-target-dir }} - -{{#option "`--debug`" }} -Build with the `dev` profile instead the `release` profile. -See also the `--profile` option for choosing a specific profile by name. -{{/option}} - -{{> options-profile }} - -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-locked }} -{{/options}} - -### Miscellaneous Options - -{{#options}} -{{> options-jobs }} -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} - -{{> options-message-format }} - -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Install or upgrade a package from crates.io: - - cargo install ripgrep - -2. Install or reinstall the package in the current directory: - - cargo install --path . - -3. 
View the list of installed packages: - - cargo install --list - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-uninstall" 1}}, {{man "cargo-search" 1}}, {{man "cargo-publish" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-locate-project.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-locate-project.md deleted file mode 100644 index 89ff79abd..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-locate-project.md +++ /dev/null @@ -1,60 +0,0 @@ -# cargo-locate-project(1) - -## NAME - -cargo-locate-project - Print a JSON representation of a Cargo.toml file's location - -## SYNOPSIS - -`cargo locate-project` [_options_] - -## DESCRIPTION - -This command will print a JSON object to stdout with the full path to the -`Cargo.toml` manifest. - -## OPTIONS - -{{#options}} - -{{#option "`--workspace`" }} -Locate the `Cargo.toml` at the root of the workspace, as opposed to the current -workspace member. -{{/option}} - -{{/options}} - -### Display Options - -{{#options}} - -{{#option "`--message-format` _fmt_" }} -The representation in which to print the project location. Valid values: - -- `json` (default): JSON object with the path under the key "root". -- `plain`: Just the path. -{{/option}} - -{{> options-display }} -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Display the path to the manifest based on the current directory: - - cargo locate-project - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-metadata" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-login.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-login.md deleted file mode 100644 index 0e361ab74..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-login.md +++ /dev/null @@ -1,51 +0,0 @@ -# cargo-login(1) - -## NAME - -cargo-login - Save an API token from the registry locally - -## SYNOPSIS - -`cargo login` [_options_] [_token_] - -## DESCRIPTION - -This command will save the API token to disk so that commands that require -authentication, such as {{man "cargo-publish" 1}}, will be automatically -authenticated. The token is saved in `$CARGO_HOME/credentials.toml`. `CARGO_HOME` -defaults to `.cargo` in your home directory. - -If the _token_ argument is not specified, it will be read from stdin. - -The API token for crates.io may be retrieved from . - -Take care to keep the token secret, it should not be shared with anyone else. - -## OPTIONS - -### Login Options - -{{#options}} -{{> options-registry }} -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. 
Save the API token to disk: - - cargo login - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-publish" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-metadata.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-metadata.md deleted file mode 100644 index c1a8920bc..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-metadata.md +++ /dev/null @@ -1,352 +0,0 @@ -# cargo-metadata(1) - -## NAME - -cargo-metadata - Machine-readable metadata about the current package - -## SYNOPSIS - -`cargo metadata` [_options_] - -## DESCRIPTION - -Output JSON to stdout containing information about the workspace members and -resolved dependencies of the current package. - -It is recommended to include the `--format-version` flag to future-proof -your code to ensure the output is in the format you are expecting. - -See the [cargo_metadata crate](https://crates.io/crates/cargo_metadata) -for a Rust API for reading the metadata. - -## OUTPUT FORMAT - -The output has the following format: - -```javascript -{ - /* Array of all packages in the workspace. - It also includes all feature-enabled dependencies unless --no-deps is used. - */ - "packages": [ - { - /* The name of the package. */ - "name": "my-package", - /* The version of the package. */ - "version": "0.1.0", - /* The Package ID, a unique identifier for referring to the package. */ - "id": "my-package 0.1.0 (path+file:///path/to/my-package)", - /* The license value from the manifest, or null. */ - "license": "MIT/Apache-2.0", - /* The license-file value from the manifest, or null. */ - "license_file": "LICENSE", - /* The description value from the manifest, or null. */ - "description": "Package description.", - /* The source ID of the package. This represents where - a package is retrieved from. - This is null for path dependencies and workspace members. - For other dependencies, it is a string with the format: - - "registry+URL" for registry-based dependencies. - Example: "registry+https://github.com/rust-lang/crates.io-index" - - "git+URL" for git-based dependencies. - Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c" - */ - "source": null, - /* Array of dependencies declared in the package's manifest. */ - "dependencies": [ - { - /* The name of the dependency. */ - "name": "bitflags", - /* The source ID of the dependency. May be null, see - description for the package source. - */ - "source": "registry+https://github.com/rust-lang/crates.io-index", - /* The version requirement for the dependency. - Dependencies without a version requirement have a value of "*". - */ - "req": "^1.0", - /* The dependency kind. - "dev", "build", or null for a normal dependency. - */ - "kind": null, - /* If the dependency is renamed, this is the new name for - the dependency as a string. null if it is not renamed. - */ - "rename": null, - /* Boolean of whether or not this is an optional dependency. */ - "optional": false, - /* Boolean of whether or not default features are enabled. */ - "uses_default_features": true, - /* Array of features enabled. */ - "features": [], - /* The target platform for the dependency. - null if not a target dependency. - */ - "target": "cfg(windows)", - /* The file system path for a local path dependency. - not present if not a path dependency. - */ - "path": "/path/to/dep", - /* A string of the URL of the registry this dependency is from. 
- If not specified or null, the dependency is from the default - registry (crates.io). - */ - "registry": null - } - ], - /* Array of Cargo targets. */ - "targets": [ - { - /* Array of target kinds. - - lib targets list the `crate-type` values from the - manifest such as "lib", "rlib", "dylib", - "proc-macro", etc. (default ["lib"]) - - binary is ["bin"] - - example is ["example"] - - integration test is ["test"] - - benchmark is ["bench"] - - build script is ["custom-build"] - */ - "kind": [ - "bin" - ], - /* Array of crate types. - - lib and example libraries list the `crate-type` values - from the manifest such as "lib", "rlib", "dylib", - "proc-macro", etc. (default ["lib"]) - - all other target kinds are ["bin"] - */ - "crate_types": [ - "bin" - ], - /* The name of the target. */ - "name": "my-package", - /* Absolute path to the root source file of the target. */ - "src_path": "/path/to/my-package/src/main.rs", - /* The Rust edition of the target. - Defaults to the package edition. - */ - "edition": "2018", - /* Array of required features. - This property is not included if no required features are set. - */ - "required-features": ["feat1"], - /* Whether the target should be documented by `cargo doc`. */ - "doc": true, - /* Whether or not this target has doc tests enabled, and - the target is compatible with doc testing. - */ - "doctest": false, - /* Whether or not this target should be built and run with `--test` - */ - "test": true - } - ], - /* Set of features defined for the package. - Each feature maps to an array of features or dependencies it - enables. - */ - "features": { - "default": [ - "feat1" - ], - "feat1": [], - "feat2": [] - }, - /* Absolute path to this package's manifest. */ - "manifest_path": "/path/to/my-package/Cargo.toml", - /* Package metadata. - This is null if no metadata is specified. - */ - "metadata": { - "docs": { - "rs": { - "all-features": true - } - } - }, - /* List of registries to which this package may be published. - Publishing is unrestricted if null, and forbidden if an empty array. */ - "publish": [ - "crates-io" - ], - /* Array of authors from the manifest. - Empty array if no authors specified. - */ - "authors": [ - "Jane Doe " - ], - /* Array of categories from the manifest. */ - "categories": [ - "command-line-utilities" - ], - /* Optional string that is the default binary picked by cargo run. */ - "default_run": null, - /* Optional string that is the minimum supported rust version */ - "rust_version": "1.56", - /* Array of keywords from the manifest. */ - "keywords": [ - "cli" - ], - /* The readme value from the manifest or null if not specified. */ - "readme": "README.md", - /* The repository value from the manifest or null if not specified. */ - "repository": "https://github.com/rust-lang/cargo", - /* The homepage value from the manifest or null if not specified. */ - "homepage": "https://rust-lang.org", - /* The documentation value from the manifest or null if not specified. */ - "documentation": "https://doc.rust-lang.org/stable/std", - /* The default edition of the package. - Note that individual targets may have different editions. - */ - "edition": "2018", - /* Optional string that is the name of a native library the package - is linking to. - */ - "links": null, - } - ], - /* Array of members of the workspace. - Each entry is the Package ID for the package. - */ - "workspace_members": [ - "my-package 0.1.0 (path+file:///path/to/my-package)", - ], - // The resolved dependency graph for the entire workspace. 
The enabled - // features are based on the enabled features for the "current" package. - // Inactivated optional dependencies are not listed. - // - // This is null if --no-deps is specified. - // - // By default, this includes all dependencies for all target platforms. - // The `--filter-platform` flag may be used to narrow to a specific - // target triple. - "resolve": { - /* Array of nodes within the dependency graph. - Each node is a package. - */ - "nodes": [ - { - /* The Package ID of this node. */ - "id": "my-package 0.1.0 (path+file:///path/to/my-package)", - /* The dependencies of this package, an array of Package IDs. */ - "dependencies": [ - "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" - ], - /* The dependencies of this package. This is an alternative to - "dependencies" which contains additional information. In - particular, this handles renamed dependencies. - */ - "deps": [ - { - /* The name of the dependency's library target. - If this is a renamed dependency, this is the new - name. - */ - "name": "bitflags", - /* The Package ID of the dependency. */ - "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - /* Array of dependency kinds. Added in Cargo 1.40. */ - "dep_kinds": [ - { - /* The dependency kind. - "dev", "build", or null for a normal dependency. - */ - "kind": null, - /* The target platform for the dependency. - null if not a target dependency. - */ - "target": "cfg(windows)" - } - ] - } - ], - /* Array of features enabled on this package. */ - "features": [ - "default" - ] - } - ], - /* The root package of the workspace. - This is null if this is a virtual workspace. Otherwise it is - the Package ID of the root package. - */ - "root": "my-package 0.1.0 (path+file:///path/to/my-package)" - }, - /* The absolute path to the build directory where Cargo places its output. */ - "target_directory": "/path/to/my-package/target", - /* The version of the schema for this metadata structure. - This will be changed if incompatible changes are ever made. - */ - "version": 1, - /* The absolute path to the root of the workspace. */ - "workspace_root": "/path/to/my-package" - /* Workspace metadata. - This is null if no metadata is specified. */ - "metadata": { - "docs": { - "rs": { - "all-features": true - } - } - } -} -```` - -## OPTIONS - -### Output Options - -{{#options}} - -{{#option "`--no-deps`" }} -Output information only about the workspace members and don't fetch -dependencies. -{{/option}} - -{{#option "`--format-version` _version_" }} -Specify the version of the output format to use. Currently `1` is the only -possible value. -{{/option}} - -{{#option "`--filter-platform` _triple_" }} -This filters the `resolve` output to only include dependencies for the -given target triple. Without this flag, the resolve includes all targets. - -Note that the dependencies listed in the "packages" array still includes all -dependencies. Each package definition is intended to be an unaltered -reproduction of the information within `Cargo.toml`. -{{/option}} - -{{/options}} - -{{> section-features }} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} - -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. 
Output JSON about the current package: - - cargo metadata --format-version=1 - -## SEE ALSO -{{man "cargo" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-new.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-new.md deleted file mode 100644 index d0ca91812..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-new.md +++ /dev/null @@ -1,46 +0,0 @@ -# cargo-new(1) - -## NAME - -cargo-new - Create a new Cargo package - -## SYNOPSIS - -`cargo new` [_options_] _path_ - -## DESCRIPTION - -This command will create a new Cargo package in the given directory. This -includes a simple template with a `Cargo.toml` manifest, sample source file, -and a VCS ignore file. If the directory is not already in a VCS repository, -then a new repository is created (see `--vcs` below). - -See {{man "cargo-init" 1}} for a similar command which will create a new manifest -in an existing directory. - -## OPTIONS - -### New Options - -{{> options-new }} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Create a binary Cargo package in the given directory: - - cargo new foo - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-init" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-owner.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-owner.md deleted file mode 100644 index 3787a4de0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-owner.md +++ /dev/null @@ -1,81 +0,0 @@ -# cargo-owner(1) - -## NAME - -cargo-owner - Manage the owners of a crate on the registry - -## SYNOPSIS - -`cargo owner` [_options_] `--add` _login_ [_crate_]\ -`cargo owner` [_options_] `--remove` _login_ [_crate_]\ -`cargo owner` [_options_] `--list` [_crate_] - -## DESCRIPTION - -This command will modify the owners for a crate on the registry. Owners of a -crate can upload new versions and yank old versions. Non-team owners can also -modify the set of owners, so take care! - -This command requires you to be authenticated with either the `--token` option -or using {{man "cargo-login" 1}}. - -If the crate name is not specified, it will use the package name from the -current directory. - -See [the reference](../reference/publishing.html#cargo-owner) for more -information about owners and publishing. - -## OPTIONS - -### Owner Options - -{{#options}} - -{{#option "`-a`" "`--add` _login_..." }} -Invite the given user or team as an owner. -{{/option}} - -{{#option "`-r`" "`--remove` _login_..." }} -Remove the given user or team as an owner. -{{/option}} - -{{#option "`-l`" "`--list`" }} -List owners of a crate. -{{/option}} - -{{> options-token }} - -{{> options-index }} - -{{> options-registry }} - -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. List owners of a package: - - cargo owner --list foo - -2. Invite an owner to a package: - - cargo owner --add username foo - -3. 
Remove an owner from a package: - - cargo owner --remove username foo - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-login" 1}}, {{man "cargo-publish" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-package.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-package.md deleted file mode 100644 index 32d936f84..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-package.md +++ /dev/null @@ -1,136 +0,0 @@ -# cargo-package(1) -{{*set actionverb="Package"}} -{{*set noall=true}} - -## NAME - -cargo-package - Assemble the local package into a distributable tarball - -## SYNOPSIS - -`cargo package` [_options_] - -## DESCRIPTION - -This command will create a distributable, compressed `.crate` file with the -source code of the package in the current directory. The resulting file will -be stored in the `target/package` directory. This performs the following -steps: - -1. Load and check the current workspace, performing some basic checks. - - Path dependencies are not allowed unless they have a version key. Cargo - will ignore the path key for dependencies in published packages. - `dev-dependencies` do not have this restriction. -2. Create the compressed `.crate` file. - - The original `Cargo.toml` file is rewritten and normalized. - - `[patch]`, `[replace]`, and `[workspace]` sections are removed from the - manifest. - - `Cargo.lock` is automatically included if the package contains an - executable binary or example target. {{man "cargo-install" 1}} will use the - packaged lock file if the `--locked` flag is used. - - A `.cargo_vcs_info.json` file is included that contains information - about the current VCS checkout hash if available (not included with - `--allow-dirty`). -3. Extract the `.crate` file and build it to verify it can build. - - This will rebuild your package from scratch to ensure that it can be - built from a pristine state. The `--no-verify` flag can be used to skip - this step. -4. Check that build scripts did not modify any source files. - -The list of files included can be controlled with the `include` and `exclude` -fields in the manifest. - -See [the reference](../reference/publishing.html) for more details about -packaging and publishing. - -### .cargo_vcs_info.json format - -Will generate a `.cargo_vcs_info.json` in the following format - -```javascript -{ - "git": { - "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302" - }, - "path_in_vcs": "" -} -``` - -`path_in_vcs` will be set to a repo-relative path for packages -in subdirectories of the version control repository. - -## OPTIONS - -### Package Options - -{{#options}} - -{{#option "`-l`" "`--list`" }} -Print files included in a package without making one. -{{/option}} - -{{#option "`--no-verify`" }} -Don't verify the contents by building them. -{{/option}} - -{{#option "`--no-metadata`" }} -Ignore warnings about a lack of human-usable metadata (such as the description -or the license). -{{/option}} - -{{#option "`--allow-dirty`" }} -Allow working directories with uncommitted VCS changes to be packaged. 
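When `--allow-dirty` is not used, the `.cargo_vcs_info.json` file documented above records the checkout that was packaged. A minimal sketch of reading it from an unpacked `.crate`, assuming the `serde_json` crate is available as a dependency and the file sits in the current directory:

```rust
use std::fs;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Read the .cargo_vcs_info.json file produced by `cargo package`.
    let text = fs::read_to_string(".cargo_vcs_info.json")?;
    let info: serde_json::Value = serde_json::from_str(&text)?;

    // Both fields follow the format shown above; fall back gracefully if absent.
    let sha1 = info["git"]["sha1"].as_str().unwrap_or("<unknown>");
    let path_in_vcs = info["path_in_vcs"].as_str().unwrap_or("");
    println!("packaged from commit {} (path in VCS: {:?})", sha1, path_in_vcs);
    Ok(())
}
```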
-{{/option}} - -{{/options}} - -{{> section-package-selection }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-target-dir }} - -{{/options}} - -{{> section-features }} - -### Manifest Options - -{{#options}} - -{{> options-manifest-path }} - -{{> options-locked }} - -{{/options}} - -### Miscellaneous Options - -{{#options}} -{{> options-jobs }} -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Create a compressed `.crate` file of the current package: - - cargo package - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-publish" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-pkgid.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-pkgid.md deleted file mode 100644 index 1fb9a60ef..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-pkgid.md +++ /dev/null @@ -1,89 +0,0 @@ -# cargo-pkgid(1) - -## NAME - -cargo-pkgid - Print a fully qualified package specification - -## SYNOPSIS - -`cargo pkgid` [_options_] [_spec_] - -## DESCRIPTION - -Given a _spec_ argument, print out the fully qualified package ID specifier -for a package or dependency in the current workspace. This command will -generate an error if _spec_ is ambiguous as to which package it refers to in -the dependency graph. If no _spec_ is given, then the specifier for the local -package is printed. - -This command requires that a lockfile is available and dependencies have been -fetched. - -A package specifier consists of a name, version, and source URL. You are -allowed to use partial specifiers to succinctly match a specific package as -long as it matches only one package. The format of a _spec_ can be one of the -following: - -SPEC Structure | Example SPEC ----------------------------|-------------- -_name_ | `bitflags` -_name_`:`_version_ | `bitflags:1.0.4` -_url_ | `https://github.com/rust-lang/cargo` -_url_`#`_version_ | `https://github.com/rust-lang/cargo#0.33.0` -_url_`#`_name_ | `https://github.com/rust-lang/crates.io-index#bitflags` -_url_`#`_name_`:`_version_ | `https://github.com/rust-lang/cargo#crates-io:0.21.0` - -## OPTIONS - -### Package Selection - -{{#options}} - -{{#option "`-p` _spec_" "`--package` _spec_" }} -Get the package ID for the given package instead of the current package. -{{/option}} - -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -### Manifest Options - -{{#options}} - -{{> options-manifest-path }} - -{{> options-locked }} - -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Retrieve package specification for `foo` package: - - cargo pkgid foo - -2. Retrieve package specification for version 1.0.0 of `foo`: - - cargo pkgid foo:1.0.0 - -3. Retrieve package specification for `foo` from crates.io: - - cargo pkgid https://github.com/rust-lang/crates.io-index#foo - -4. 
Retrieve package specification for `foo` from a local package: - - cargo pkgid file:///path/to/local/package#foo - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-generate-lockfile" 1}}, {{man "cargo-metadata" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-publish.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-publish.md deleted file mode 100644 index 13ee736e9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-publish.md +++ /dev/null @@ -1,115 +0,0 @@ -# cargo-publish(1) -{{*set actionverb="Publish"}} - -## NAME - -cargo-publish - Upload a package to the registry - -## SYNOPSIS - -`cargo publish` [_options_] - -## DESCRIPTION - -This command will create a distributable, compressed `.crate` file with the -source code of the package in the current directory and upload it to a -registry. The default registry is . This performs the -following steps: - -1. Performs a few checks, including: - - Checks the `package.publish` key in the manifest for restrictions on - which registries you are allowed to publish to. -2. Create a `.crate` file by following the steps in {{man "cargo-package" 1}}. -3. Upload the crate to the registry. Note that the server will perform - additional checks on the crate. - -This command requires you to be authenticated with either the `--token` option -or using {{man "cargo-login" 1}}. - -See [the reference](../reference/publishing.html) for more details about -packaging and publishing. - -## OPTIONS - -### Publish Options - -{{#options}} - -{{#option "`--dry-run`" }} -Perform all checks without uploading. -{{/option}} - -{{> options-token }} - -{{#option "`--no-verify`" }} -Don't verify the contents by building them. -{{/option}} - -{{#option "`--allow-dirty`" }} -Allow working directories with uncommitted VCS changes to be packaged. -{{/option}} - -{{> options-index }} - -{{#option "`--registry` _registry_"}} -Name of the registry to publish to. Registry names are defined in [Cargo -config files](../reference/config.html). If not specified, and there is a -[`package.publish`](../reference/manifest.html#the-publish-field) field in -`Cargo.toml` with a single registry, then it will publish to that registry. -Otherwise it will use the default registry, which is defined by the -[`registry.default`](../reference/config.html#registrydefault) config key -which defaults to `crates-io`. -{{/option}} - -{{/options}} - -{{> section-options-package }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-target-dir }} - -{{/options}} - -{{> section-features }} - -### Manifest Options - -{{#options}} - -{{> options-manifest-path }} - -{{> options-locked }} - -{{/options}} - -### Miscellaneous Options - -{{#options}} -{{> options-jobs }} -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. 
Publish the current package: - - cargo publish - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-package" 1}}, {{man "cargo-login" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-report.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-report.md deleted file mode 100644 index a505a014a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-report.md +++ /dev/null @@ -1,42 +0,0 @@ -# cargo-report(1) - -## NAME - -cargo-report - Generate and display various kinds of reports - -## SYNOPSIS - -`cargo report` _type_ [_options_] - -### DESCRIPTION - -Displays a report of the given _type_ - currently, only `future-incompat` is supported - -## OPTIONS - -{{#options}} - -{{#option "`--id` _id_" }} -Show the report with the specified Cargo-generated id -{{/option}} - -{{#option "`-p` _spec_..." "`--package` _spec_..." }} -Only display a report for the specified package -{{/option}} - -{{/options}} - -## EXAMPLES - -1. Display the latest future-incompat report: - - cargo report future-incompat - -2. Display the latest future-incompat report for a specific package: - - cargo report future-incompat --package my-dep:0.0.1 - -## SEE ALSO -[Future incompat report](../reference/future-incompat-report.html) - -{{man "cargo" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-run.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-run.md deleted file mode 100644 index b4e1d0669..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-run.md +++ /dev/null @@ -1,108 +0,0 @@ -# cargo-run(1) -{{*set actionverb="Run"}} - -## NAME - -cargo-run - Run the current package - -## SYNOPSIS - -`cargo run` [_options_] [`--` _args_] - -## DESCRIPTION - -Run a binary or example of the local package. - -All the arguments following the two dashes (`--`) are passed to the binary to -run. If you're passing arguments to both Cargo and the binary, the ones after -`--` go to the binary, the ones before go to Cargo. - -## OPTIONS - -{{> section-options-package }} - -### Target Selection - -When no target selection options are given, `cargo run` will run the binary -target. If there are multiple binary targets, you must pass a target flag to -choose one. Or, the `default-run` field may be specified in the `[package]` -section of `Cargo.toml` to choose the name of the binary to run by default. - -{{#options}} - -{{#option "`--bin` _name_" }} -Run the specified binary. -{{/option}} - -{{#option "`--example` _name_" }} -Run the specified example. -{{/option}} - -{{/options}} - -{{> section-features }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-release }} - -{{> options-profile }} - -{{> options-ignore-rust-version }} - -{{/options}} - -### Output Options - -{{#options}} -{{> options-target-dir }} -{{/options}} - -### Display Options - -{{#options}} - -{{> options-display }} - -{{> options-message-format }} - -{{/options}} - -### Manifest Options - -{{#options}} - -{{> options-manifest-path }} - -{{> options-locked }} - -{{/options}} - -{{> section-options-common }} - -### Miscellaneous Options - -{{#options}} -{{> options-jobs }} -{{/options}} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Build the local package and run its main target (assuming only one binary): - - cargo run - -2. 
Run an example with extra arguments: - - cargo run --example exname -- --exoption exarg1 exarg2 - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-build" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-rustc.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-rustc.md deleted file mode 100644 index 53ed339ca..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-rustc.md +++ /dev/null @@ -1,122 +0,0 @@ -# cargo-rustc(1) -{{*set actionverb="Build"}} - -## NAME - -cargo-rustc - Compile the current package, and pass extra options to the compiler - -## SYNOPSIS - -`cargo rustc` [_options_] [`--` _args_] - -## DESCRIPTION - -The specified target for the current package (or package specified by `-p` if -provided) will be compiled along with all of its dependencies. The specified -_args_ will all be passed to the final compiler invocation, not any of the -dependencies. Note that the compiler will still unconditionally receive -arguments such as `-L`, `--extern`, and `--crate-type`, and the specified -_args_ will simply be added to the compiler invocation. - -See for documentation on rustc -flags. - -{{> description-one-target }} -To pass flags to all compiler processes spawned by Cargo, use the `RUSTFLAGS` -[environment variable](../reference/environment-variables.html) or the -`build.rustflags` [config value](../reference/config.html). - -## OPTIONS - -{{> section-options-package }} - -### Target Selection - -When no target selection options are given, `cargo rustc` will build all -binary and library targets of the selected package. - -{{> options-targets }} - -{{> section-features }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-release }} - -{{#option "`--profile` _name_" }} -Build with the given profile. - -The `rustc` subcommand will treat the following named profiles with special behaviors: - -* `check` โ€” Builds in the same way as the {{man "cargo-check" 1}} command with - the `dev` profile. -* `test` โ€” Builds in the same way as the {{man "cargo-test" 1}} command, - enabling building in test mode which will enable tests and enable the `test` - cfg option. See [rustc - tests](https://doc.rust-lang.org/rustc/tests/index.html) for more detail. -* `bench` โ€” Builds in the same was as the {{man "cargo-bench" 1}} command, - similar to the `test` profile. - -See the [the reference](../reference/profiles.html) for more details on profiles. -{{/option}} - -{{> options-ignore-rust-version }} - -{{/options}} - -### Output Options - -{{#options}} -{{> options-target-dir }} -{{/options}} - -### Display Options - -{{#options}} - -{{> options-display }} - -{{> options-message-format }} - -{{/options}} - -### Manifest Options - -{{#options}} - -{{> options-manifest-path }} - -{{> options-locked }} - -{{/options}} - -{{> section-options-common }} - -### Miscellaneous Options - -{{#options}} -{{> options-jobs }} -{{> options-future-incompat }} -{{/options}} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Check if your package (not including dependencies) uses unsafe code: - - cargo rustc --lib -- -D unsafe-code - -2. 
Try an experimental flag on the nightly compiler, such as this which prints - the size of every type: - - cargo rustc --lib -- -Z print-type-sizes - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-build" 1}}, {{man "rustc" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-rustdoc.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-rustdoc.md deleted file mode 100644 index 278c56964..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-rustdoc.md +++ /dev/null @@ -1,112 +0,0 @@ -# cargo-rustdoc(1) -{{*set actionverb="Document"}} - -## NAME - -cargo-rustdoc - Build a package's documentation, using specified custom flags - -## SYNOPSIS - -`cargo rustdoc` [_options_] [`--` _args_] - -## DESCRIPTION - -The specified target for the current package (or package specified by `-p` if -provided) will be documented with the specified _args_ being passed to the -final rustdoc invocation. Dependencies will not be documented as part of this -command. Note that rustdoc will still unconditionally receive arguments such -as `-L`, `--extern`, and `--crate-type`, and the specified _args_ will simply -be added to the rustdoc invocation. - -See for documentation on rustdoc -flags. - -{{> description-one-target }} -To pass flags to all rustdoc processes spawned by Cargo, use the -`RUSTDOCFLAGS` [environment variable](../reference/environment-variables.html) -or the `build.rustdocflags` [config value](../reference/config.html). - -## OPTIONS - -### Documentation Options - -{{#options}} - -{{#option "`--open`" }} -Open the docs in a browser after building them. This will use your default -browser unless you define another one in the `BROWSER` environment variable -or use the [`doc.browser`](../reference/config.html#docbrowser) configuration -option. -{{/option}} - -{{/options}} - -{{> section-options-package }} - -### Target Selection - -When no target selection options are given, `cargo rustdoc` will document all -binary and library targets of the selected package. The binary will be skipped -if its name is the same as the lib target. Binaries are skipped if they have -`required-features` that are missing. - -{{> options-targets }} - -{{> section-features }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-release }} - -{{> options-profile }} - -{{> options-ignore-rust-version }} - -{{/options}} - -### Output Options - -{{#options}} -{{> options-target-dir }} -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} - -{{> options-message-format }} -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-manifest-path }} - -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -### Miscellaneous Options - -{{#options}} -{{> options-jobs }} -{{/options}} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Build documentation with custom CSS included from a given file: - - cargo rustdoc --lib -- --extend-css extra.css - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-doc" 1}}, {{man "rustdoc" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-search.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-search.md deleted file mode 100644 index 5c7bcac3c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-search.md +++ /dev/null @@ -1,52 +0,0 @@ -# cargo-search(1) - -## NAME - -cargo-search - Search packages in crates.io - -## SYNOPSIS - -`cargo search` [_options_] [_query_...] 
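Both the `cargo rustc` and `cargo rustdoc` pages above follow the `[options] [-- args]` pattern: arguments before the two dashes are for Cargo, arguments after them go to the underlying tool. A small sketch of driving such an invocation from a helper program, assuming `cargo` is on `PATH` and that an `extra.css` file exists (mirroring the `cargo rustdoc` example above):

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    let status = Command::new("cargo")
        // Everything before the literal "--" is an option for Cargo itself...
        .args(["rustdoc", "--lib", "--"])
        // ...and everything after it is passed to the final rustdoc invocation.
        .args(["--extend-css", "extra.css"])
        .status()?;
    if !status.success() {
        std::process::exit(status.code().unwrap_or(1));
    }
    Ok(())
}
```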
- -## DESCRIPTION - -This performs a textual search for crates on . The matching -crates will be displayed along with their description in TOML format suitable -for copying into a `Cargo.toml` manifest. - -## OPTIONS - -### Search Options - -{{#options}} - -{{#option "`--limit` _limit_" }} -Limit the number of results (default: 10, max: 100). -{{/option}} - -{{> options-index }} - -{{> options-registry }} - -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Search for a package from crates.io: - - cargo search serde - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-install" 1}}, {{man "cargo-publish" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-test.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-test.md deleted file mode 100644 index be12eaa70..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-test.md +++ /dev/null @@ -1,179 +0,0 @@ -# cargo-test(1) -{{*set actionverb="Test"}} -{{*set nouns="tests"}} - -## NAME - -cargo-test - Execute unit and integration tests of a package - -## SYNOPSIS - -`cargo test` [_options_] [_testname_] [`--` _test-options_] - -## DESCRIPTION - -Compile and execute unit and integration tests. - -The test filtering argument `TESTNAME` and all the arguments following the two -dashes (`--`) are passed to the test binaries and thus to _libtest_ (rustc's -built in unit-test and micro-benchmarking framework). If you're passing -arguments to both Cargo and the binary, the ones after `--` go to the binary, -the ones before go to Cargo. For details about libtest's arguments see the -output of `cargo test -- --help` and check out the rustc book's chapter on -how tests work at . - -As an example, this will filter for tests with `foo` in their name and run them -on 3 threads in parallel: - - cargo test foo -- --test-threads 3 - -Tests are built with the `--test` option to `rustc` which creates an -executable with a `main` function that automatically runs all functions -annotated with the `#[test]` attribute in multiple threads. `#[bench]` -annotated functions will also be run with one iteration to verify that they -are functional. - -The libtest harness may be disabled by setting `harness = false` in the target -manifest settings, in which case your code will need to provide its own `main` -function to handle running tests. - -Documentation tests are also run by default, which is handled by `rustdoc`. It -extracts code samples from documentation comments and executes them. See the -[rustdoc book](https://doc.rust-lang.org/rustdoc/) for more information on -writing doc tests. - -## OPTIONS - -### Test Options - -{{> options-test }} - -{{> section-package-selection }} - -### Target Selection - -When no target selection options are given, `cargo test` will build the -following targets of the selected packages: - -- lib โ€” used to link with binaries, examples, integration tests, and doc tests -- bins (only if integration tests are built and required features are - available) -- examples โ€” to ensure they compile -- lib as a unit test -- bins as unit tests -- integration tests -- doc tests for the lib target - -The default behavior can be changed by setting the `test` flag for the target -in the manifest settings. Setting examples to `test = true` will build and run -the example as a test. 
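As noted in the `cargo test` description above, a target that sets `harness = false` must provide its own `main` function to run its tests. A minimal sketch of what such a `main` might look like; the `check_addition` check is purely illustrative:

```rust
use std::process::exit;

fn check_addition() -> Result<(), String> {
    if 2 + 2 == 4 {
        Ok(())
    } else {
        Err(String::from("2 + 2 did not equal 4"))
    }
}

fn main() {
    // With `harness = false`, Cargo runs this `main` directly; a non-zero
    // exit status is what marks the test target as failed.
    println!("running 1 hand-rolled test ...");
    if let Err(msg) = check_addition() {
        eprintln!("FAILED: {}", msg);
        exit(1);
    }
    println!("ok");
}
```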
Setting targets to `test = false` will stop them from -being tested by default. Target selection options that take a target by name -ignore the `test` flag and will always test the given target. - -Doc tests for libraries may be disabled by setting `doctest = false` for the -library in the manifest. - -Binary targets are automatically built if there is an integration test or -benchmark. This allows an integration test to execute the binary to exercise -and test its behavior. The `CARGO_BIN_EXE_` -[environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates) -is set when the integration test is built so that it can use the -[`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the -executable. - -{{> options-targets }} - -{{#options}} - -{{#option "`--doc`" }} -Test only the library's documentation. This cannot be mixed with other -target options. -{{/option}} - -{{/options}} - -{{> section-features }} - -### Compilation Options - -{{#options}} - -{{> options-target-triple }} - -{{> options-release }} - -{{> options-profile }} - -{{> options-ignore-rust-version }} - -{{/options}} - -### Output Options - -{{#options}} -{{> options-target-dir }} -{{/options}} - -### Display Options - -By default the Rust test harness hides output from test execution to keep -results readable. Test output can be recovered (e.g., for debugging) by passing -`--nocapture` to the test binaries: - - cargo test -- --nocapture - -{{#options}} - -{{> options-display }} - -{{> options-message-format }} - -{{/options}} - -### Manifest Options - -{{#options}} - -{{> options-manifest-path }} - -{{> options-locked }} - -{{/options}} - -{{> section-options-common }} - -### Miscellaneous Options - -The `--jobs` argument affects the building of the test executable but does not -affect how many threads are used when running the tests. The Rust test harness -includes an option to control the number of threads used: - - cargo test -j 2 -- --test-threads=2 - -{{#options}} - -{{> options-jobs }} -{{> options-future-incompat }} - -{{/options}} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Execute all the unit and integration tests of the current package: - - cargo test - -2. Run only tests whose names match against a filter string: - - cargo test name_filter - -3. Run only a specific test within a specific integration test: - - cargo test --test int_test_name -- modname::test_name - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-bench" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-tree.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-tree.md deleted file mode 100644 index 7a6a8de6b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-tree.md +++ /dev/null @@ -1,251 +0,0 @@ -# cargo-tree(1) -{{*set actionverb="Display"}} -{{*set noall=true}} - -## NAME - -cargo-tree - Display a tree visualization of a dependency graph - -## SYNOPSIS - -`cargo tree` [_options_] - -## DESCRIPTION - -This command will display a tree of dependencies to the terminal. 
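A sketch of the integration-test pattern described in the `cargo test` page above, where the `CARGO_BIN_EXE_<name>` variable locates a binary target at build time; the target name `myprog` and its `--help` flag are illustrative assumptions:

```rust
// tests/cli.rs
use std::process::Command;

#[test]
fn binary_prints_help() {
    // CARGO_BIN_EXE_<name> is set by Cargo when building integration tests.
    let exe = env!("CARGO_BIN_EXE_myprog");
    let output = Command::new(exe)
        .arg("--help")
        .output()
        .expect("failed to spawn the binary under test");
    assert!(output.status.success());
}
```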
An example -of a simple project that depends on the "rand" package: - -``` -myproject v0.1.0 (/myproject) -โ””โ”€โ”€ rand v0.7.3 - โ”œโ”€โ”€ getrandom v0.1.14 - โ”‚ โ”œโ”€โ”€ cfg-if v0.1.10 - โ”‚ โ””โ”€โ”€ libc v0.2.68 - โ”œโ”€โ”€ libc v0.2.68 (*) - โ”œโ”€โ”€ rand_chacha v0.2.2 - โ”‚ โ”œโ”€โ”€ ppv-lite86 v0.2.6 - โ”‚ โ””โ”€โ”€ rand_core v0.5.1 - โ”‚ โ””โ”€โ”€ getrandom v0.1.14 (*) - โ””โ”€โ”€ rand_core v0.5.1 (*) -[build-dependencies] -โ””โ”€โ”€ cc v1.0.50 -``` - -Packages marked with `(*)` have been "de-duplicated". The dependencies for the -package have already been shown elsewhere in the graph, and so are not -repeated. Use the `--no-dedupe` option to repeat the duplicates. - -The `-e` flag can be used to select the dependency kinds to display. The -"features" kind changes the output to display the features enabled by -each dependency. For example, `cargo tree -e features`: - -``` -myproject v0.1.0 (/myproject) -โ””โ”€โ”€ log feature "serde" - โ””โ”€โ”€ log v0.4.8 - โ”œโ”€โ”€ serde v1.0.106 - โ””โ”€โ”€ cfg-if feature "default" - โ””โ”€โ”€ cfg-if v0.1.10 -``` - -In this tree, `myproject` depends on `log` with the `serde` feature. `log` in -turn depends on `cfg-if` with "default" features. When using `-e features` it -can be helpful to use `-i` flag to show how the features flow into a package. -See the examples below for more detail. - -## OPTIONS - -### Tree Options - -{{#options}} - -{{#option "`-i` _spec_" "`--invert` _spec_" }} -Show the reverse dependencies for the given package. This flag will invert -the tree and display the packages that depend on the given package. - -Note that in a workspace, by default it will only display the package's -reverse dependencies inside the tree of the workspace member in the current -directory. The `--workspace` flag can be used to extend it so that it will -show the package's reverse dependencies across the entire workspace. The `-p` -flag can be used to display the package's reverse dependencies only with the -subtree of the package given to `-p`. -{{/option}} - -{{#option "`--prune` _spec_" }} -Prune the given package from the display of the dependency tree. -{{/option}} - -{{#option "`--depth` _depth_" }} -Maximum display depth of the dependency tree. A depth of 1 displays the direct -dependencies, for example. -{{/option}} - -{{#option "`--no-dedupe`" }} -Do not de-duplicate repeated dependencies. Usually, when a package has already -displayed its dependencies, further occurrences will not re-display its -dependencies, and will include a `(*)` to indicate it has already been shown. -This flag will cause those duplicates to be repeated. -{{/option}} - -{{#option "`-d`" "`--duplicates`" }} -Show only dependencies which come in multiple versions (implies `--invert`). -When used with the `-p` flag, only shows duplicates within the subtree of the -given package. - -It can be beneficial for build times and executable sizes to avoid building -that same package multiple times. This flag can help identify the offending -packages. You can then investigate if the package that depends on the -duplicate with the older version can be updated to the newer version so that -only one instance is built. -{{/option}} - -{{#option "`-e` _kinds_" "`--edges` _kinds_" }} -The dependency kinds to display. Takes a comma separated list of values: - -- `all` โ€” Show all edge kinds. -- `normal` โ€” Show normal dependencies. -- `build` โ€” Show build dependencies. -- `dev` โ€” Show development dependencies. -- `features` โ€” Show features enabled by each dependency. 
If this is the only - kind given, then it will automatically include the other dependency kinds. -- `no-normal` โ€” Do not include normal dependencies. -- `no-build` โ€” Do not include build dependencies. -- `no-dev` โ€” Do not include development dependencies. -- `no-proc-macro` โ€” Do not include procedural macro dependencies. - -The `normal`, `build`, `dev`, and `all` dependency kinds cannot be mixed with -`no-normal`, `no-build`, or `no-dev` dependency kinds. - -The default is `normal,build,dev`. -{{/option}} - -{{#option "`--target` _triple_" }} -Filter dependencies matching the given target-triple. The default is the host -platform. Use the value `all` to include *all* targets. -{{/option}} - -{{/options}} - -### Tree Formatting Options - -{{#options}} - -{{#option "`--charset` _charset_" }} -Chooses the character set to use for the tree. Valid values are "utf8" or -"ascii". Default is "utf8". -{{/option}} - -{{#option "`-f` _format_" "`--format` _format_" }} -Set the format string for each package. The default is "{p}". - -This is an arbitrary string which will be used to display each package. The following -strings will be replaced with the corresponding value: - -- `{p}` โ€” The package name. -- `{l}` โ€” The package license. -- `{r}` โ€” The package repository URL. -- `{f}` โ€” Comma-separated list of package features that are enabled. -- `{lib}` โ€” The name, as used in a `use` statement, of the package's library. -{{/option}} - -{{#option "`--prefix` _prefix_" }} -Sets how each line is displayed. The _prefix_ value can be one of: - -- `indent` (default) โ€” Shows each line indented as a tree. -- `depth` โ€” Show as a list, with the numeric depth printed before each entry. -- `none` โ€” Show as a flat list. -{{/option}} - -{{/options}} - -{{> section-package-selection }} - -### Manifest Options - -{{#options}} - -{{> options-manifest-path }} - -{{> options-locked }} - -{{/options}} - -{{> section-features }} - -### Display Options - -{{#options}} - -{{> options-display }} - -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Display the tree for the package in the current directory: - - cargo tree - -2. Display all the packages that depend on the `syn` package: - - cargo tree -i syn - -3. Show the features enabled on each package: - - cargo tree --format "{p} {f}" - -4. Show all packages that are built multiple times. This can happen if multiple - semver-incompatible versions appear in the tree (like 1.0.0 and 2.0.0). - - cargo tree -d - -5. Explain why features are enabled for the `syn` package: - - cargo tree -e features -i syn - - The `-e features` flag is used to show features. The `-i` flag is used to - invert the graph so that it displays the packages that depend on `syn`. 
An - example of what this would display: - - ``` - syn v1.0.17 - โ”œโ”€โ”€ syn feature "clone-impls" - โ”‚ โ””โ”€โ”€ syn feature "default" - โ”‚ โ””โ”€โ”€ rustversion v1.0.2 - โ”‚ โ””โ”€โ”€ rustversion feature "default" - โ”‚ โ””โ”€โ”€ myproject v0.1.0 (/myproject) - โ”‚ โ””โ”€โ”€ myproject feature "default" (command-line) - โ”œโ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "derive" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "full" - โ”‚ โ””โ”€โ”€ rustversion v1.0.2 (*) - โ”œโ”€โ”€ syn feature "parsing" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "printing" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "proc-macro" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ””โ”€โ”€ syn feature "quote" - โ”œโ”€โ”€ syn feature "printing" (*) - โ””โ”€โ”€ syn feature "proc-macro" (*) - ``` - - To read this graph, you can follow the chain for each feature from the root - to see why it is included. For example, the "full" feature is added by the - `rustversion` crate which is included from `myproject` (with the default - features), and `myproject` is the package selected on the command-line. All - of the other `syn` features are added by the "default" feature ("quote" is - added by "printing" and "proc-macro", both of which are default features). - - If you're having difficulty cross-referencing the de-duplicated `(*)` - entries, try with the `--no-dedupe` flag to get the full output. - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-metadata" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-uninstall.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-uninstall.md deleted file mode 100644 index 73c50c7a1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-uninstall.md +++ /dev/null @@ -1,63 +0,0 @@ -# cargo-uninstall(1) - -## NAME - -cargo-uninstall - Remove a Rust binary - -## SYNOPSIS - -`cargo uninstall` [_options_] [_spec_...] - -## DESCRIPTION - -This command removes a package installed with {{man "cargo-install" 1}}. The _spec_ -argument is a package ID specification of the package to remove (see -{{man "cargo-pkgid" 1}}). - -By default all binaries are removed for a crate but the `--bin` and -`--example` flags can be used to only remove particular binaries. - -{{> description-install-root }} - -## OPTIONS - -### Install Options - -{{#options}} - -{{#option "`-p`" "`--package` _spec_..." }} -Package to uninstall. -{{/option}} - -{{#option "`--bin` _name_..." }} -Only uninstall the binary _name_. -{{/option}} - -{{#option "`--root` _dir_" }} -Directory to uninstall packages from. -{{/option}} - -{{/options}} - -### Display Options - -{{#options}} - -{{> options-display }} - -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Uninstall a previously installed package. 
- - cargo uninstall ripgrep - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-install" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-update.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-update.md deleted file mode 100644 index 54aa90ae8..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-update.md +++ /dev/null @@ -1,97 +0,0 @@ -# cargo-update(1) - -## NAME - -cargo-update - Update dependencies as recorded in the local lock file - -## SYNOPSIS - -`cargo update` [_options_] - -## DESCRIPTION - -This command will update dependencies in the `Cargo.lock` file to the latest -version. If the `Cargo.lock` file does not exist, it will be created with the -latest available versions. - -## OPTIONS - -### Update Options - -{{#options}} - -{{#option "`-p` _spec_..." "`--package` _spec_..." }} -Update only the specified packages. This flag may be specified -multiple times. See {{man "cargo-pkgid" 1}} for the SPEC format. - -If packages are specified with the `-p` flag, then a conservative update of -the lockfile will be performed. This means that only the dependency specified -by SPEC will be updated. Its transitive dependencies will be updated only if -SPEC cannot be updated without updating dependencies. All other dependencies -will remain locked at their currently recorded versions. - -If `-p` is not specified, all dependencies are updated. -{{/option}} - -{{#option "`--aggressive`" }} -When used with `-p`, dependencies of _spec_ are forced to update as well. -Cannot be used with `--precise`. -{{/option}} - -{{#option "`--precise` _precise_" }} -When used with `-p`, allows you to specify a specific version number to set -the package to. If the package comes from a git repository, this can be a git -revision (such as a SHA hash or tag). -{{/option}} - -{{#option "`-w`" "`--workspace`" }} -Attempt to update only packages defined in the workspace. Other packages -are updated only if they don't already exist in the lockfile. This -option is useful for updating `Cargo.lock` after you've changed version -numbers in `Cargo.toml`. -{{/option}} - -{{#option "`--dry-run`" }} -Displays what would be updated, but doesn't actually write the lockfile. -{{/option}} - -{{/options}} - -### Display Options - -{{#options}} -{{> options-display }} -{{/options}} - -### Manifest Options - -{{#options}} - -{{> options-manifest-path }} - -{{> options-locked }} - -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Update all dependencies in the lockfile: - - cargo update - -2. Update only specific dependencies: - - cargo update -p foo -p bar - -3. Set a specific dependency to a specific version: - - cargo update -p foo --precise 1.2.3 - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-generate-lockfile" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-vendor.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-vendor.md deleted file mode 100644 index f7c453203..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-vendor.md +++ /dev/null @@ -1,93 +0,0 @@ -# cargo-vendor(1) - -## NAME - -cargo-vendor - Vendor all dependencies locally - -## SYNOPSIS - -`cargo vendor` [_options_] [_path_] - -## DESCRIPTION - -This cargo subcommand will vendor all crates.io and git dependencies for a -project into the specified directory at ``. 
After this command completes -the vendor directory specified by `` will contain all remote sources from -dependencies specified. Additional manifests beyond the default one can be -specified with the `-s` option. - -The `cargo vendor` command will also print out the configuration necessary -to use the vendored sources, which you will need to add to `.cargo/config.toml`. - -## OPTIONS - -### Vendor Options - -{{#options}} - -{{#option "`-s` _manifest_" "`--sync` _manifest_" }} -Specify extra `Cargo.toml` manifests to workspaces which should also be -vendored and synced to the output. -{{/option}} - -{{#option "`--no-delete`" }} -Don't delete the "vendor" directory when vendoring, but rather keep all -existing contents of the vendor directory -{{/option}} - -{{#option "`--respect-source-config`" }} -Instead of ignoring `[source]` configuration by default in `.cargo/config.toml` -read it and use it when downloading crates from crates.io, for example -{{/option}} - -{{#option "`--versioned-dirs`" }} -Normally versions are only added to disambiguate multiple versions of the -same package. This option causes all directories in the "vendor" directory -to be versioned, which makes it easier to track the history of vendored -packages over time, and can help with the performance of re-vendoring when -only a subset of the packages have changed. -{{/option}} - -{{/options}} - -### Manifest Options - -{{#options}} - -{{> options-manifest-path }} - -{{> options-locked }} - -{{/options}} - -### Display Options - -{{#options}} - -{{> options-display }} - -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Vendor all dependencies into a local "vendor" folder - - cargo vendor - -2. Vendor all dependencies into a local "third-party/vendor" folder - - cargo vendor third-party/vendor - -3. Vendor the current workspace as well as another to "vendor" - - cargo vendor -s ../path/to/Cargo.toml - -## SEE ALSO -{{man "cargo" 1}} - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-verify-project.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-verify-project.md deleted file mode 100644 index 99b749087..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-verify-project.md +++ /dev/null @@ -1,58 +0,0 @@ -# cargo-verify-project(1) - -## NAME - -cargo-verify-project - Check correctness of crate manifest - -## SYNOPSIS - -`cargo verify-project` [_options_] - -## DESCRIPTION - -This command will parse the local manifest and check its validity. It emits a -JSON object with the result. A successful validation will display: - - {"success":"true"} - -An invalid workspace will display: - - {"invalid":"human-readable error message"} - -## OPTIONS - -### Display Options - -{{#options}} - -{{> options-display }} - -{{/options}} - -### Manifest Options - -{{#options}} - -{{> options-manifest-path }} - -{{> options-locked }} - -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -## EXIT STATUS - -* `0`: The workspace is OK. -* `1`: The workspace is invalid. - -## EXAMPLES - -1. 
Check the current workspace for errors: - - cargo verify-project - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-package" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-version.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-version.md deleted file mode 100644 index c6e453532..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-version.md +++ /dev/null @@ -1,41 +0,0 @@ -# cargo-version(1) - -## NAME - -cargo-version - Show version information - -## SYNOPSIS - -`cargo version` [_options_] - -## DESCRIPTION - -Displays the version of Cargo. - -## OPTIONS - -{{#options}} - -{{#option "`-v`" "`--verbose`" }} -Display additional version information. -{{/option}} - -{{/options}} - -## EXAMPLES - -1. Display the version: - - cargo version - -2. The version is also available via flags: - - cargo --version - cargo -V - -3. Display extra version information: - - cargo -Vv - -## SEE ALSO -{{man "cargo" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-yank.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-yank.md deleted file mode 100644 index 0a3ba3965..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo-yank.md +++ /dev/null @@ -1,70 +0,0 @@ -# cargo-yank(1) - -## NAME - -cargo-yank - Remove a pushed crate from the index - -## SYNOPSIS - -`cargo yank` [_options_] `--vers` _version_ [_crate_] - -## DESCRIPTION - -The yank command removes a previously published crate's version from the -server's index. This command does not delete any data, and the crate will -still be available for download via the registry's download link. - -Note that existing crates locked to a yanked version will still be able to -download the yanked version to use it. Cargo will, however, not allow any new -crates to be locked to any yanked version. - -This command requires you to be authenticated with either the `--token` option -or using {{man "cargo-login" 1}}. - -If the crate name is not specified, it will use the package name from the -current directory. - -## OPTIONS - -### Yank Options - -{{#options}} - -{{#option "`--vers` _version_" }} -The version to yank or un-yank. -{{/option}} - -{{#option "`--undo`" }} -Undo a yank, putting a version back into the index. -{{/option}} - -{{> options-token }} - -{{> options-index }} - -{{> options-registry }} - -{{/options}} - -### Display Options - -{{#options}} - -{{> options-display }} - -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## EXAMPLES - -1. Yank a crate from the index: - - cargo yank --vers 1.0.7 foo - -## SEE ALSO -{{man "cargo" 1}}, {{man "cargo-login" 1}}, {{man "cargo-publish" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo.md deleted file mode 100644 index 9f2c622da..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/cargo.md +++ /dev/null @@ -1,235 +0,0 @@ -# cargo(1) - -## NAME - -cargo - The Rust package manager - -## SYNOPSIS - -`cargo` [_options_] _command_ [_args_]\ -`cargo` [_options_] `--version`\ -`cargo` [_options_] `--list`\ -`cargo` [_options_] `--help`\ -`cargo` [_options_] `--explain` _code_ - -## DESCRIPTION - -This program is a package manager and build tool for the Rust language, -available at . - -## COMMANDS - -### Build Commands - -{{man "cargo-bench" 1}}\ -    Execute benchmarks of a package. 
- -{{man "cargo-build" 1}}\ -    Compile a package. - -{{man "cargo-check" 1}}\ -    Check a local package and all of its dependencies for errors. - -{{man "cargo-clean" 1}}\ -    Remove artifacts that Cargo has generated in the past. - -{{man "cargo-doc" 1}}\ -    Build a package's documentation. - -{{man "cargo-fetch" 1}}\ -    Fetch dependencies of a package from the network. - -{{man "cargo-fix" 1}}\ -    Automatically fix lint warnings reported by rustc. - -{{man "cargo-run" 1}}\ -    Run a binary or example of the local package. - -{{man "cargo-rustc" 1}}\ -    Compile a package, and pass extra options to the compiler. - -{{man "cargo-rustdoc" 1}}\ -    Build a package's documentation, using specified custom flags. - -{{man "cargo-test" 1}}\ -    Execute unit and integration tests of a package. - -### Manifest Commands - -{{man "cargo-generate-lockfile" 1}}\ -    Generate `Cargo.lock` for a project. - -{{man "cargo-locate-project" 1}}\ -    Print a JSON representation of a `Cargo.toml` file's location. - -{{man "cargo-metadata" 1}}\ -    Output the resolved dependencies of a package in machine-readable format. - -{{man "cargo-pkgid" 1}}\ -    Print a fully qualified package specification. - -{{man "cargo-tree" 1}}\ -    Display a tree visualization of a dependency graph. - -{{man "cargo-update" 1}}\ -    Update dependencies as recorded in the local lock file. - -{{man "cargo-vendor" 1}}\ -    Vendor all dependencies locally. - -{{man "cargo-verify-project" 1}}\ -    Check correctness of crate manifest. - -### Package Commands - -{{man "cargo-init" 1}}\ -    Create a new Cargo package in an existing directory. - -{{man "cargo-install" 1}}\ -    Build and install a Rust binary. - -{{man "cargo-new" 1}}\ -    Create a new Cargo package. - -{{man "cargo-search" 1}}\ -    Search packages in crates.io. - -{{man "cargo-uninstall" 1}}\ -    Remove a Rust binary. - -### Publishing Commands - -{{man "cargo-login" 1}}\ -    Save an API token from the registry locally. - -{{man "cargo-owner" 1}}\ -    Manage the owners of a crate on the registry. - -{{man "cargo-package" 1}}\ -    Assemble the local package into a distributable tarball. - -{{man "cargo-publish" 1}}\ -    Upload a package to the registry. - -{{man "cargo-yank" 1}}\ -    Remove a pushed crate from the index. - -### General Commands - -{{man "cargo-help" 1}}\ -    Display help information about Cargo. - -{{man "cargo-version" 1}}\ -    Show version information. - -## OPTIONS - -### Special Options - -{{#options}} - -{{#option "`-V`" "`--version`" }} -Print version info and exit. If used with `--verbose`, prints extra -information. -{{/option}} - -{{#option "`--list`" }} -List all installed Cargo subcommands. If used with `--verbose`, prints extra -information. -{{/option}} - -{{#option "`--explain` _code_" }} -Run `rustc --explain CODE` which will print out a detailed explanation of an -error message (for example, `E0004`). -{{/option}} - -{{/options}} - -### Display Options - -{{#options}} - -{{> options-display }} - -{{/options}} - -### Manifest Options - -{{#options}} -{{> options-locked }} -{{/options}} - -{{> section-options-common }} - -{{> section-environment }} - -{{> section-exit-status }} - -## FILES - -`~/.cargo/`\ -    Default location for Cargo's "home" directory where it -stores various files. The location can be changed with the `CARGO_HOME` -environment variable. - -`$CARGO_HOME/bin/`\ -    Binaries installed by {{man "cargo-install" 1}} will be located here. 
If using -[rustup], executables distributed with Rust are also located here. - -`$CARGO_HOME/config.toml`\ -    The global configuration file. See [the reference](../reference/config.html) -for more information about configuration files. - -`.cargo/config.toml`\ -    Cargo automatically searches for a file named `.cargo/config.toml` in the -current directory, and all parent directories. These configuration files -will be merged with the global configuration file. - -`$CARGO_HOME/credentials.toml`\ -    Private authentication information for logging in to a registry. - -`$CARGO_HOME/registry/`\ -    This directory contains cached downloads of the registry index and any -downloaded dependencies. - -`$CARGO_HOME/git/`\ -    This directory contains cached downloads of git dependencies. - -Please note that the internal structure of the `$CARGO_HOME` directory is not -stable yet and may be subject to change. - -[rustup]: https://rust-lang.github.io/rustup/ - -## EXAMPLES - -1. Build a local package and all of its dependencies: - - cargo build - -2. Build a package with optimizations: - - cargo build --release - -3. Run tests for a cross-compiled target: - - cargo test --target i686-unknown-linux-gnu - -4. Create a new package that builds an executable: - - cargo new foobar - -5. Create a package in the current directory: - - mkdir foo && cd foo - cargo init . - -6. Learn about a command's options and usage: - - cargo help clean - -## BUGS - -See for issues. - -## SEE ALSO -{{man "rustc" 1}}, {{man "rustdoc" 1}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-bench.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-bench.txt deleted file mode 100644 index 4a773e8a3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-bench.txt +++ /dev/null @@ -1,367 +0,0 @@ -CARGO-BENCH(1) - -NAME - cargo-bench - Execute benchmarks of a package - -SYNOPSIS - cargo bench [options] [benchname] [-- bench-options] - -DESCRIPTION - Compile and execute benchmarks. - - The benchmark filtering argument benchname and all the arguments - following the two dashes (--) are passed to the benchmark binaries and - thus to libtest (rustc's built in unit-test and micro-benchmarking - framework). If you are passing arguments to both Cargo and the binary, - the ones after -- go to the binary, the ones before go to Cargo. For - details about libtest's arguments see the output of cargo bench -- - --help and check out the rustc book's chapter on how tests work at - . - - As an example, this will run only the benchmark named foo (and skip - other similarly named benchmarks like foobar): - - cargo bench -- foo --exact - - Benchmarks are built with the --test option to rustc which creates an - executable with a main function that automatically runs all functions - annotated with the #[bench] attribute. Cargo passes the --bench flag to - the test harness to tell it to run only benchmarks. - - The libtest harness may be disabled by setting harness = false in the - target manifest settings, in which case your code will need to provide - its own main function to handle running benchmarks. - - Note: The #[bench] attribute - - is currently unstable and only available on the nightly channel - . There - are some packages available on crates.io - that may help with running - benchmarks on the stable channel, such as Criterion - . 
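   As a further illustration of the argument splitting described above,
   options placed before the two dashes are consumed by Cargo itself, while
   everything after them is forwarded to the benchmark binary (the benchmark
   name foo is only a placeholder):

       # --verbose goes to Cargo; "foo --exact" goes to the benchmark binary
       cargo bench --verbose -- foo --exact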
- - By default, cargo bench uses the bench profile - , which - enables optimizations and disables debugging information. If you need to - debug a benchmark, you can use the --profile=dev command-line option to - switch to the dev profile. You can then run the debug-enabled benchmark - within a debugger. - -OPTIONS - Benchmark Options - --no-run - Compile, but don't run benchmarks. - - --no-fail-fast - Run all benchmarks regardless of failure. Without this flag, Cargo - will exit after the first executable fails. The Rust test harness - will run all benchmarks within the executable to completion, this - flag only applies to the executable as a whole. - - Package Selection - By default, when no package selection options are given, the packages - selected depend on the selected manifest file (based on the current - working directory if --manifest-path is not given). If the manifest is - the root of a workspace then the workspaces default members are - selected, otherwise only the package defined by the manifest will be - selected. - - The default members of a workspace can be set explicitly with the - workspace.default-members key in the root manifest. If this is not set, - a virtual workspace will include all workspace members (equivalent to - passing --workspace), and a non-virtual workspace will include only the - root crate itself. - - -p spec..., --package spec... - Benchmark only the specified packages. See cargo-pkgid(1) for the - SPEC format. This flag may be specified multiple times and supports - common Unix glob patterns like *, ? and []. However, to avoid your - shell accidentally expanding glob patterns before Cargo handles - them, you must use single quotes or double quotes around each - pattern. - - --workspace - Benchmark all members in the workspace. - - --all - Deprecated alias for --workspace. - - --exclude SPEC... - Exclude the specified packages. Must be used in conjunction with the - --workspace flag. This flag may be specified multiple times and - supports common Unix glob patterns like *, ? and []. However, to - avoid your shell accidentally expanding glob patterns before Cargo - handles them, you must use single quotes or double quotes around - each pattern. - - Target Selection - When no target selection options are given, cargo bench will build the - following targets of the selected packages: - - o lib โ€” used to link with binaries and benchmarks - - o bins (only if benchmark targets are built and required features are - available) - - o lib as a benchmark - - o bins as benchmarks - - o benchmark targets - - The default behavior can be changed by setting the bench flag for the - target in the manifest settings. Setting examples to bench = true will - build and run the example as a benchmark. Setting targets to bench = - false will stop them from being benchmarked by default. Target selection - options that take a target by name ignore the bench flag and will always - benchmark the given target. - - Passing target selection flags will benchmark only the specified - targets. - - Note that --bin, --example, --test and --bench flags also support common - Unix glob patterns like *, ? and []. However, to avoid your shell - accidentally expanding glob patterns before Cargo handles them, you must - use single quotes or double quotes around each glob pattern. - - --lib - Benchmark the package's library. - - --bin name... - Benchmark the specified binary. This flag may be specified multiple - times and supports common Unix glob patterns. 
- - --bins - Benchmark all binary targets. - - --example name... - Benchmark the specified example. This flag may be specified multiple - times and supports common Unix glob patterns. - - --examples - Benchmark all example targets. - - --test name... - Benchmark the specified integration test. This flag may be specified - multiple times and supports common Unix glob patterns. - - --tests - Benchmark all targets in test mode that have the test = true - manifest flag set. By default this includes the library and binaries - built as unittests, and integration tests. Be aware that this will - also build any required dependencies, so the lib target may be built - twice (once as a unittest, and once as a dependency for binaries, - integration tests, etc.). Targets may be enabled or disabled by - setting the test flag in the manifest settings for the target. - - --bench name... - Benchmark the specified benchmark. This flag may be specified - multiple times and supports common Unix glob patterns. - - --benches - Benchmark all targets in benchmark mode that have the bench = true - manifest flag set. By default this includes the library and binaries - built as benchmarks, and bench targets. Be aware that this will also - build any required dependencies, so the lib target may be built - twice (once as a benchmark, and once as a dependency for binaries, - benchmarks, etc.). Targets may be enabled or disabled by setting the - bench flag in the manifest settings for the target. - - --all-targets - Benchmark all targets. This is equivalent to specifying --lib --bins - --tests --benches --examples. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Compilation Options - --target triple - Benchmark for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - --profile name - Benchmark with the given profile. See the the reference - for more - details on profiles. - - --ignore-rust-version - Benchmark the target even if the selected Rust compiler is older - than the required Rust version as configured in the project's - rust-version field. - - Output Options - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - Display Options - By default the Rust test harness hides output from benchmark execution - to keep results readable. 
Benchmark output can be recovered (e.g., for - debugging) by passing --nocapture to the benchmark binaries: - - cargo bench -- --nocapture - - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - --message-format fmt - The output format for diagnostic messages. Can be specified multiple - times and consists of comma-separated values. Valid values: - - o human (default): Display in a human-readable text format. - Conflicts with short and json. - - o short: Emit shorter, human-readable text messages. Conflicts with - human and json. - - o json: Emit JSON messages to stdout. See the reference - - for more details. Conflicts with human and short. - - o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. Cannot be used with - human or short. - - o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON - messages contains embedded ANSI color codes for respecting - rustc's default color scheme. Cannot be used with human or short. - - o json-render-diagnostics: Instruct Cargo to not include rustc - diagnostics in in JSON messages printed, but instead Cargo itself - should render the JSON diagnostics coming from rustc. Cargo's own - JSON diagnostics and others coming from rustc are still emitted. - Cannot be used with human or short. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. 
- - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - - Miscellaneous Options - The --jobs argument affects the building of the benchmark executable but - does not affect how many threads are used when running the benchmarks. - The Rust test harness runs benchmarks serially in a single thread. - - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Build and execute all the benchmarks of the current package: - - cargo bench - - 2. Run only a specific benchmark within a specific benchmark target: - - cargo bench --bench bench_name -- modname::some_benchmark - -SEE ALSO - cargo(1), cargo-test(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-build.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-build.txt deleted file mode 100644 index 5fa54ab0e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-build.txt +++ /dev/null @@ -1,319 +0,0 @@ -CARGO-BUILD(1) - -NAME - cargo-build - Compile the current package - -SYNOPSIS - cargo build [options] - -DESCRIPTION - Compile local packages and all of their dependencies. - -OPTIONS - Package Selection - By default, when no package selection options are given, the packages - selected depend on the selected manifest file (based on the current - working directory if --manifest-path is not given). If the manifest is - the root of a workspace then the workspaces default members are - selected, otherwise only the package defined by the manifest will be - selected. - - The default members of a workspace can be set explicitly with the - workspace.default-members key in the root manifest. If this is not set, - a virtual workspace will include all workspace members (equivalent to - passing --workspace), and a non-virtual workspace will include only the - root crate itself. - - -p spec..., --package spec... - Build only the specified packages. See cargo-pkgid(1) for the SPEC - format. This flag may be specified multiple times and supports - common Unix glob patterns like *, ? and []. However, to avoid your - shell accidentally expanding glob patterns before Cargo handles - them, you must use single quotes or double quotes around each - pattern. - - --workspace - Build all members in the workspace. - - --all - Deprecated alias for --workspace. - - --exclude SPEC... - Exclude the specified packages. Must be used in conjunction with the - --workspace flag. This flag may be specified multiple times and - supports common Unix glob patterns like *, ? and []. However, to - avoid your shell accidentally expanding glob patterns before Cargo - handles them, you must use single quotes or double quotes around - each pattern. - - Target Selection - When no target selection options are given, cargo build will build all - binary and library targets of the selected packages. Binaries are - skipped if they have required-features that are missing. - - Passing target selection flags will build only the specified targets. - - Note that --bin, --example, --test and --bench flags also support common - Unix glob patterns like *, ? and []. 
However, to avoid your shell - accidentally expanding glob patterns before Cargo handles them, you must - use single quotes or double quotes around each glob pattern. - - --lib - Build the package's library. - - --bin name... - Build the specified binary. This flag may be specified multiple - times and supports common Unix glob patterns. - - --bins - Build all binary targets. - - --example name... - Build the specified example. This flag may be specified multiple - times and supports common Unix glob patterns. - - --examples - Build all example targets. - - --test name... - Build the specified integration test. This flag may be specified - multiple times and supports common Unix glob patterns. - - --tests - Build all targets in test mode that have the test = true manifest - flag set. By default this includes the library and binaries built as - unittests, and integration tests. Be aware that this will also build - any required dependencies, so the lib target may be built twice - (once as a unittest, and once as a dependency for binaries, - integration tests, etc.). Targets may be enabled or disabled by - setting the test flag in the manifest settings for the target. - - --bench name... - Build the specified benchmark. This flag may be specified multiple - times and supports common Unix glob patterns. - - --benches - Build all targets in benchmark mode that have the bench = true - manifest flag set. By default this includes the library and binaries - built as benchmarks, and bench targets. Be aware that this will also - build any required dependencies, so the lib target may be built - twice (once as a benchmark, and once as a dependency for binaries, - benchmarks, etc.). Targets may be enabled or disabled by setting the - bench flag in the manifest settings for the target. - - --all-targets - Build all targets. This is equivalent to specifying --lib --bins - --tests --benches --examples. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Compilation Options - --target triple - Build for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - -r, --release - Build optimized artifacts with the release profile. See also the - --profile option for choosing a specific profile by name. - - --profile name - Build with the given profile. See the the reference - for more - details on profiles. - - --ignore-rust-version - Build the target even if the selected Rust compiler is older than - the required Rust version as configured in the project's - rust-version field. 
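   To tie the compilation options together, two illustrative invocations
   (the target triple and the profile name below are only examples):

       # Optimized cross-compile; artifacts are placed under
       # target/aarch64-unknown-linux-gnu/release
       cargo build --release --target aarch64-unknown-linux-gnu

       # Build with a named profile defined in Cargo.toml
       cargo build --profile release-lto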
- - Output Options - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - --out-dir directory - Copy final artifacts to this directory. - - This option is unstable and available only on the nightly channel - and - requires the -Z unstable-options flag to enable. See - for more - information. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - --message-format fmt - The output format for diagnostic messages. Can be specified multiple - times and consists of comma-separated values. Valid values: - - o human (default): Display in a human-readable text format. - Conflicts with short and json. - - o short: Emit shorter, human-readable text messages. Conflicts with - human and json. - - o json: Emit JSON messages to stdout. See the reference - - for more details. Conflicts with human and short. - - o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. Cannot be used with - human or short. - - o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON - messages contains embedded ANSI color codes for respecting - rustc's default color scheme. Cannot be used with human or short. - - o json-render-diagnostics: Instruct Cargo to not include rustc - diagnostics in in JSON messages printed, but instead Cargo itself - should render the JSON diagnostics coming from rustc. Cargo's own - JSON diagnostics and others coming from rustc are still emitted. - Cannot be used with human or short. - - --build-plan - Outputs a series of JSON messages to stdout that indicate the - commands to run the build. - - This option is unstable and available only on the nightly channel - and - requires the -Z unstable-options flag to enable. See - for more - information. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. 
Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - - Miscellaneous Options - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - - --future-incompat-report - Displays a future-incompat report for any future-incompatible - warnings produced during execution of this command - - See cargo-report(1) - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Build the local package and all of its dependencies: - - cargo build - - 2. Build with optimizations: - - cargo build --release - -SEE ALSO - cargo(1), cargo-rustc(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-check.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-check.txt deleted file mode 100644 index 2f63b5683..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-check.txt +++ /dev/null @@ -1,313 +0,0 @@ -CARGO-CHECK(1) - -NAME - cargo-check - Check the current package - -SYNOPSIS - cargo check [options] - -DESCRIPTION - Check a local package and all of its dependencies for errors. This will - essentially compile the packages without performing the final step of - code generation, which is faster than running cargo build. The compiler - will save metadata files to disk so that future runs will reuse them if - the source has not been modified. Some diagnostics and errors are only - emitted during code generation, so they inherently won't be reported - with cargo check. - -OPTIONS - Package Selection - By default, when no package selection options are given, the packages - selected depend on the selected manifest file (based on the current - working directory if --manifest-path is not given). If the manifest is - the root of a workspace then the workspaces default members are - selected, otherwise only the package defined by the manifest will be - selected. - - The default members of a workspace can be set explicitly with the - workspace.default-members key in the root manifest. If this is not set, - a virtual workspace will include all workspace members (equivalent to - passing --workspace), and a non-virtual workspace will include only the - root crate itself. - - -p spec..., --package spec... - Check only the specified packages. See cargo-pkgid(1) for the SPEC - format. This flag may be specified multiple times and supports - common Unix glob patterns like *, ? and []. However, to avoid your - shell accidentally expanding glob patterns before Cargo handles - them, you must use single quotes or double quotes around each - pattern. - - --workspace - Check all members in the workspace. - - --all - Deprecated alias for --workspace. 
- - --exclude SPEC... - Exclude the specified packages. Must be used in conjunction with the - --workspace flag. This flag may be specified multiple times and - supports common Unix glob patterns like *, ? and []. However, to - avoid your shell accidentally expanding glob patterns before Cargo - handles them, you must use single quotes or double quotes around - each pattern. - - Target Selection - When no target selection options are given, cargo check will check all - binary and library targets of the selected packages. Binaries are - skipped if they have required-features that are missing. - - Passing target selection flags will check only the specified targets. - - Note that --bin, --example, --test and --bench flags also support common - Unix glob patterns like *, ? and []. However, to avoid your shell - accidentally expanding glob patterns before Cargo handles them, you must - use single quotes or double quotes around each glob pattern. - - --lib - Check the package's library. - - --bin name... - Check the specified binary. This flag may be specified multiple - times and supports common Unix glob patterns. - - --bins - Check all binary targets. - - --example name... - Check the specified example. This flag may be specified multiple - times and supports common Unix glob patterns. - - --examples - Check all example targets. - - --test name... - Check the specified integration test. This flag may be specified - multiple times and supports common Unix glob patterns. - - --tests - Check all targets in test mode that have the test = true manifest - flag set. By default this includes the library and binaries built as - unittests, and integration tests. Be aware that this will also build - any required dependencies, so the lib target may be built twice - (once as a unittest, and once as a dependency for binaries, - integration tests, etc.). Targets may be enabled or disabled by - setting the test flag in the manifest settings for the target. - - --bench name... - Check the specified benchmark. This flag may be specified multiple - times and supports common Unix glob patterns. - - --benches - Check all targets in benchmark mode that have the bench = true - manifest flag set. By default this includes the library and binaries - built as benchmarks, and bench targets. Be aware that this will also - build any required dependencies, so the lib target may be built - twice (once as a benchmark, and once as a dependency for binaries, - benchmarks, etc.). Targets may be enabled or disabled by setting the - bench flag in the manifest settings for the target. - - --all-targets - Check all targets. This is equivalent to specifying --lib --bins - --tests --benches --examples. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Compilation Options - --target triple - Check for the given architecture. The default is the host - architecture. The general format of the triple is - ---. 
Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - -r, --release - Check optimized artifacts with the release profile. See also the - --profile option for choosing a specific profile by name. - - --profile name - Check with the given profile. - - As a special case, specifying the test profile will also enable - checking in test mode which will enable checking tests and enable - the test cfg option. See rustc tests - for more detail. - - See the the reference - for more - details on profiles. - - --ignore-rust-version - Check the target even if the selected Rust compiler is older than - the required Rust version as configured in the project's - rust-version field. - - Output Options - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - --message-format fmt - The output format for diagnostic messages. Can be specified multiple - times and consists of comma-separated values. Valid values: - - o human (default): Display in a human-readable text format. - Conflicts with short and json. - - o short: Emit shorter, human-readable text messages. Conflicts with - human and json. - - o json: Emit JSON messages to stdout. See the reference - - for more details. Conflicts with human and short. - - o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. Cannot be used with - human or short. - - o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON - messages contains embedded ANSI color codes for respecting - rustc's default color scheme. Cannot be used with human or short. - - o json-render-diagnostics: Instruct Cargo to not include rustc - diagnostics in in JSON messages printed, but instead Cargo itself - should render the JSON diagnostics coming from rustc. Cargo's own - JSON diagnostics and others coming from rustc are still emitted. - Cannot be used with human or short. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. 
- - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - - Miscellaneous Options - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - - --future-incompat-report - Displays a future-incompat report for any future-incompatible - warnings produced during execution of this command - - See cargo-report(1) - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Check the local package for errors: - - cargo check - - 2. Check all targets, including unit tests: - - cargo check --all-targets --profile=test - -SEE ALSO - cargo(1), cargo-build(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-clean.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-clean.txt deleted file mode 100644 index bec65ca11..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-clean.txt +++ /dev/null @@ -1,149 +0,0 @@ -CARGO-CLEAN(1) - -NAME - cargo-clean - Remove generated artifacts - -SYNOPSIS - cargo clean [options] - -DESCRIPTION - Remove artifacts from the target directory that Cargo has generated in - the past. - - With no options, cargo clean will delete the entire target directory. - -OPTIONS - Package Selection - When no packages are selected, all packages and all dependencies in the - workspace are cleaned. - - -p spec..., --package spec... - Clean only the specified packages. This flag may be specified - multiple times. See cargo-pkgid(1) for the SPEC format. - - Clean Options - --doc - This option will cause cargo clean to remove only the doc directory - in the target directory. - - --release - Remove all artifacts in the release directory. - - --profile name - Remove all artifacts in the directory with the given profile name. - - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - --target triple - Clean for the given architecture. The default is the host - architecture. The general format of the triple is - ---. 
Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Remove the entire target directory: - - cargo clean - - 2. 
Remove only the release artifacts: - - cargo clean --release - -SEE ALSO - cargo(1), cargo-build(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-doc.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-doc.txt deleted file mode 100644 index 7eaccd30b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-doc.txt +++ /dev/null @@ -1,275 +0,0 @@ -CARGO-DOC(1) - -NAME - cargo-doc - Build a package's documentation - -SYNOPSIS - cargo doc [options] - -DESCRIPTION - Build the documentation for the local package and all dependencies. The - output is placed in target/doc in rustdoc's usual format. - -OPTIONS - Documentation Options - --open - Open the docs in a browser after building them. This will use your - default browser unless you define another one in the BROWSER - environment variable or use the doc.browser - - configuration option. - - --no-deps - Do not build documentation for dependencies. - - --document-private-items - Include non-public items in the documentation. This will be enabled - by default if documenting a binary target. - - Package Selection - By default, when no package selection options are given, the packages - selected depend on the selected manifest file (based on the current - working directory if --manifest-path is not given). If the manifest is - the root of a workspace then the workspaces default members are - selected, otherwise only the package defined by the manifest will be - selected. - - The default members of a workspace can be set explicitly with the - workspace.default-members key in the root manifest. If this is not set, - a virtual workspace will include all workspace members (equivalent to - passing --workspace), and a non-virtual workspace will include only the - root crate itself. - - -p spec..., --package spec... - Document only the specified packages. See cargo-pkgid(1) for the - SPEC format. This flag may be specified multiple times and supports - common Unix glob patterns like *, ? and []. However, to avoid your - shell accidentally expanding glob patterns before Cargo handles - them, you must use single quotes or double quotes around each - pattern. - - --workspace - Document all members in the workspace. - - --all - Deprecated alias for --workspace. - - --exclude SPEC... - Exclude the specified packages. Must be used in conjunction with the - --workspace flag. This flag may be specified multiple times and - supports common Unix glob patterns like *, ? and []. However, to - avoid your shell accidentally expanding glob patterns before Cargo - handles them, you must use single quotes or double quotes around - each pattern. - - Target Selection - When no target selection options are given, cargo doc will document all - binary and library targets of the selected package. The binary will be - skipped if its name is the same as the lib target. Binaries are skipped - if they have required-features that are missing. - - The default behavior can be changed by setting doc = false for the - target in the manifest settings. Using target selection options will - ignore the doc flag and will always document the given target. - - --lib - Document the package's library. - - --bin name... - Document the specified binary. This flag may be specified multiple - times and supports common Unix glob patterns. - - --bins - Document all binary targets. - - --example name... - Document the specified example. 
This flag may be specified multiple - times and supports common Unix glob patterns. - - --examples - Document all example targets. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Compilation Options - --target triple - Document for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - -r, --release - Document optimized artifacts with the release profile. See also the - --profile option for choosing a specific profile by name. - - --profile name - Document with the given profile. See the the reference - for more - details on profiles. - - --ignore-rust-version - Document the target even if the selected Rust compiler is older than - the required Rust version as configured in the project's - rust-version field. - - Output Options - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - --message-format fmt - The output format for diagnostic messages. Can be specified multiple - times and consists of comma-separated values. Valid values: - - o human (default): Display in a human-readable text format. - Conflicts with short and json. - - o short: Emit shorter, human-readable text messages. Conflicts with - human and json. - - o json: Emit JSON messages to stdout. See the reference - - for more details. Conflicts with human and short. - - o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. Cannot be used with - human or short. - - o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON - messages contains embedded ANSI color codes for respecting - rustc's default color scheme. Cannot be used with human or short. 
- - o json-render-diagnostics: Instruct Cargo to not include rustc - diagnostics in in JSON messages printed, but instead Cargo itself - should render the JSON diagnostics coming from rustc. Cargo's own - JSON diagnostics and others coming from rustc are still emitted. - Cannot be used with human or short. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - - Miscellaneous Options - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Build the local package documentation and its dependencies and output - to target/doc. - - cargo doc - -SEE ALSO - cargo(1), cargo-rustdoc(1), rustdoc(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-fetch.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-fetch.txt deleted file mode 100644 index 91664c46b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-fetch.txt +++ /dev/null @@ -1,130 +0,0 @@ -CARGO-FETCH(1) - -NAME - cargo-fetch - Fetch dependencies of a package from the network - -SYNOPSIS - cargo fetch [options] - -DESCRIPTION - If a Cargo.lock file is available, this command will ensure that all of - the git dependencies and/or registry dependencies are downloaded and - locally available. Subsequent Cargo commands never touch the network - after a cargo fetch unless the lock file changes. - - If the lock file is not available, then this command will generate the - lock file before fetching the dependencies. - - If --target is not specified, then all target dependencies are fetched. 
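   For example, a typical offline workflow might fetch everything needed for
   a single target up front and then build later without touching the
   network (the target triple is only illustrative):

       # Download registry and git dependencies for one target
       cargo fetch --target x86_64-unknown-linux-gnu

       # Later, build from the cached downloads without network access
       cargo build --offline --target x86_64-unknown-linux-gnu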
- - See also the cargo-prefetch - plugin which adds a command to download popular crates. This may be - useful if you plan to use Cargo without a network with the --offline - flag. - -OPTIONS - Fetch options - --target triple - Fetch for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. 
Fetch all dependencies: - - cargo fetch - -SEE ALSO - cargo(1), cargo-update(1), cargo-generate-lockfile(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-fix.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-fix.txt deleted file mode 100644 index 1e22f364c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-fix.txt +++ /dev/null @@ -1,384 +0,0 @@ -CARGO-FIX(1) - -NAME - cargo-fix - Automatically fix lint warnings reported by rustc - -SYNOPSIS - cargo fix [options] - -DESCRIPTION - This Cargo subcommand will automatically take rustc's suggestions from - diagnostics like warnings and apply them to your source code. This is - intended to help automate tasks that rustc itself already knows how to - tell you to fix! - - Executing cargo fix will under the hood execute cargo-check(1). Any - warnings applicable to your crate will be automatically fixed (if - possible) and all remaining warnings will be displayed when the check - process is finished. For example if you'd like to apply all fixes to the - current package, you can run: - - cargo fix - - which behaves the same as cargo check --all-targets. - - cargo fix is only capable of fixing code that is normally compiled with - cargo check. If code is conditionally enabled with optional features, - you will need to enable those features for that code to be analyzed: - - cargo fix --features foo - - Similarly, other cfg expressions like platform-specific code will need - to pass --target to fix code for the given target. - - cargo fix --target x86_64-pc-windows-gnu - - If you encounter any problems with cargo fix or otherwise have any - questions or feature requests please don't hesitate to file an issue at - . - - Edition migration - The cargo fix subcommand can also be used to migrate a package from one - edition - - to the next. The general procedure is: - - 1. Run cargo fix --edition. Consider also using the --all-features flag - if your project has multiple features. You may also want to run cargo - fix --edition multiple times with different --target flags if your - project has platform-specific code gated by cfg attributes. - - 2. Modify Cargo.toml to set the edition field - - to the new edition. - - 3. Run your project tests to verify that everything still works. If new - warnings are issued, you may want to consider running cargo fix again - (without the --edition flag) to apply any suggestions given by the - compiler. - - And hopefully that's it! Just keep in mind of the caveats mentioned - above that cargo fix cannot update code for inactive features or cfg - expressions. Also, in some rare cases the compiler is unable to - automatically migrate all code to the new edition, and this may require - manual changes after building with the new edition. - -OPTIONS - Fix options - --broken-code - Fix code even if it already has compiler errors. This is useful if - cargo fix fails to apply the changes. It will apply the changes and - leave the broken code in the working directory for you to inspect - and manually fix. - - --edition - Apply changes that will update the code to the next edition. This - will not update the edition in the Cargo.toml manifest, which must - be updated manually after cargo fix --edition has finished. - - --edition-idioms - Apply suggestions that will update code to the preferred style for - the current edition. - - --allow-no-vcs - Fix code even if a VCS was not detected. 
- - --allow-dirty - Fix code even if the working directory has changes. - - --allow-staged - Fix code even if the working directory has staged changes. - - Package Selection - By default, when no package selection options are given, the packages - selected depend on the selected manifest file (based on the current - working directory if --manifest-path is not given). If the manifest is - the root of a workspace then the workspaces default members are - selected, otherwise only the package defined by the manifest will be - selected. - - The default members of a workspace can be set explicitly with the - workspace.default-members key in the root manifest. If this is not set, - a virtual workspace will include all workspace members (equivalent to - passing --workspace), and a non-virtual workspace will include only the - root crate itself. - - -p spec..., --package spec... - Fix only the specified packages. See cargo-pkgid(1) for the SPEC - format. This flag may be specified multiple times and supports - common Unix glob patterns like *, ? and []. However, to avoid your - shell accidentally expanding glob patterns before Cargo handles - them, you must use single quotes or double quotes around each - pattern. - - --workspace - Fix all members in the workspace. - - --all - Deprecated alias for --workspace. - - --exclude SPEC... - Exclude the specified packages. Must be used in conjunction with the - --workspace flag. This flag may be specified multiple times and - supports common Unix glob patterns like *, ? and []. However, to - avoid your shell accidentally expanding glob patterns before Cargo - handles them, you must use single quotes or double quotes around - each pattern. - - Target Selection - When no target selection options are given, cargo fix will fix all - targets (--all-targets implied). Binaries are skipped if they have - required-features that are missing. - - Passing target selection flags will fix only the specified targets. - - Note that --bin, --example, --test and --bench flags also support common - Unix glob patterns like *, ? and []. However, to avoid your shell - accidentally expanding glob patterns before Cargo handles them, you must - use single quotes or double quotes around each glob pattern. - - --lib - Fix the package's library. - - --bin name... - Fix the specified binary. This flag may be specified multiple times - and supports common Unix glob patterns. - - --bins - Fix all binary targets. - - --example name... - Fix the specified example. This flag may be specified multiple times - and supports common Unix glob patterns. - - --examples - Fix all example targets. - - --test name... - Fix the specified integration test. This flag may be specified - multiple times and supports common Unix glob patterns. - - --tests - Fix all targets in test mode that have the test = true manifest flag - set. By default this includes the library and binaries built as - unittests, and integration tests. Be aware that this will also build - any required dependencies, so the lib target may be built twice - (once as a unittest, and once as a dependency for binaries, - integration tests, etc.). Targets may be enabled or disabled by - setting the test flag in the manifest settings for the target. - - --bench name... - Fix the specified benchmark. This flag may be specified multiple - times and supports common Unix glob patterns. - - --benches - Fix all targets in benchmark mode that have the bench = true - manifest flag set. 
By default this includes the library and binaries - built as benchmarks, and bench targets. Be aware that this will also - build any required dependencies, so the lib target may be built - twice (once as a benchmark, and once as a dependency for binaries, - benchmarks, etc.). Targets may be enabled or disabled by setting the - bench flag in the manifest settings for the target. - - --all-targets - Fix all targets. This is equivalent to specifying --lib --bins - --tests --benches --examples. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Compilation Options - --target triple - Fix for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - -r, --release - Fix optimized artifacts with the release profile. See also the - --profile option for choosing a specific profile by name. - - --profile name - Fix with the given profile. - - As a special case, specifying the test profile will also enable - checking in test mode which will enable checking tests and enable - the test cfg option. See rustc tests - for more detail. - - See the the reference - for more - details on profiles. - - --ignore-rust-version - Fix the target even if the selected Rust compiler is older than the - required Rust version as configured in the project's rust-version - field. - - Output Options - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - --message-format fmt - The output format for diagnostic messages. Can be specified multiple - times and consists of comma-separated values. Valid values: - - o human (default): Display in a human-readable text format. - Conflicts with short and json. - - o short: Emit shorter, human-readable text messages. Conflicts with - human and json. 
- - o json: Emit JSON messages to stdout. See the reference - - for more details. Conflicts with human and short. - - o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. Cannot be used with - human or short. - - o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON - messages contains embedded ANSI color codes for respecting - rustc's default color scheme. Cannot be used with human or short. - - o json-render-diagnostics: Instruct Cargo to not include rustc - diagnostics in in JSON messages printed, but instead Cargo itself - should render the JSON diagnostics coming from rustc. Cargo's own - JSON diagnostics and others coming from rustc are still emitted. - Cannot be used with human or short. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - - Miscellaneous Options - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Apply compiler suggestions to the local package: - - cargo fix - - 2. Update a package to prepare it for the next edition: - - cargo fix --edition - - 3. 
Apply suggested idioms for the current edition: - - cargo fix --edition-idioms - -SEE ALSO - cargo(1), cargo-check(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-generate-lockfile.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-generate-lockfile.txt deleted file mode 100644 index 13f113790..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-generate-lockfile.txt +++ /dev/null @@ -1,106 +0,0 @@ -CARGO-GENERATE-LOCKFILE(1) - -NAME - cargo-generate-lockfile - Generate the lockfile for a package - -SYNOPSIS - cargo generate-lockfile [options] - -DESCRIPTION - This command will create the Cargo.lock lockfile for the current package - or workspace. If the lockfile already exists, it will be rebuilt with - the latest available version of every package. - - See also cargo-update(1) which is also capable of creating a Cargo.lock - lockfile and has more options for controlling update behavior. - -OPTIONS - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. 
- - o 101: Cargo failed to complete. - -EXAMPLES - 1. Create or update the lockfile for the current package or workspace: - - cargo generate-lockfile - -SEE ALSO - cargo(1), cargo-update(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-help.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-help.txt deleted file mode 100644 index ea87ddfdb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-help.txt +++ /dev/null @@ -1,23 +0,0 @@ -CARGO-HELP(1) - -NAME - cargo-help - Get help for a Cargo command - -SYNOPSIS - cargo help [subcommand] - -DESCRIPTION - Prints a help message for the given command. - -EXAMPLES - 1. Get help for a command: - - cargo help build - - 2. Help is also available with the --help flag: - - cargo build --help - -SEE ALSO - cargo(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-init.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-init.txt deleted file mode 100644 index f95348a66..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-init.txt +++ /dev/null @@ -1,114 +0,0 @@ -CARGO-INIT(1) - -NAME - cargo-init - Create a new Cargo package in an existing directory - -SYNOPSIS - cargo init [options] [path] - -DESCRIPTION - This command will create a new Cargo manifest in the current directory. - Give a path as an argument to create in the given directory. - - If there are typically-named Rust source files already in the directory, - those will be used. If not, then a sample src/main.rs file will be - created, or src/lib.rs if --lib is passed. - - If the directory is not already in a VCS repository, then a new - repository is created (see --vcs below). - - See cargo-new(1) for a similar command which will create a new package - in a new directory. - -OPTIONS - Init Options - --bin - Create a package with a binary target (src/main.rs). This is the - default behavior. - - --lib - Create a package with a library target (src/lib.rs). - - --edition edition - Specify the Rust edition to use. Default is 2021. Possible values: - 2015, 2018, 2021 - - --name name - Set the package name. Defaults to the directory name. - - --vcs vcs - Initialize a new VCS repository for the given version control system - (git, hg, pijul, or fossil) or do not initialize any version control - at all (none). If not specified, defaults to git or the - configuration value cargo-new.vcs, or none if already inside a VCS - repository. - - --registry registry - This sets the publish field in Cargo.toml to the given registry name - which will restrict publishing only to that registry. - - Registry names are defined in Cargo config files - . If not - specified, the default registry defined by the registry.default - config key is used. If the default registry is not set and - --registry is not used, the publish field will not be set which - means that publishing will not be restricted. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. 
Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Create a binary Cargo package in the current directory: - - cargo init - -SEE ALSO - cargo(1), cargo-new(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-install.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-install.txt deleted file mode 100644 index 9cb8c1d97..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-install.txt +++ /dev/null @@ -1,335 +0,0 @@ -CARGO-INSTALL(1) - -NAME - cargo-install - Build and install a Rust binary - -SYNOPSIS - cargo install [options] crate... - cargo install [options] --path path - cargo install [options] --git url [crate...] - cargo install [options] --list - -DESCRIPTION - This command manages Cargo's local set of installed binary crates. Only - packages which have executable [[bin]] or [[example]] targets can be - installed, and all executables are installed into the installation - root's bin folder. - - The installation root is determined, in order of precedence: - - o --root option - - o CARGO_INSTALL_ROOT environment variable - - o install.root Cargo config value - - - o CARGO_HOME environment variable - - o $HOME/.cargo - - There are multiple sources from which a crate can be installed. The - default location is crates.io but the --git, --path, and --registry - flags can change this source. If the source contains more than one - package (such as crates.io or a git repository with multiple crates) the - crate argument is required to indicate which crate should be installed. - - Crates from crates.io can optionally specify the version they wish to - install via the --version flags, and similarly packages from git - repositories can optionally specify the branch, tag, or revision that - should be installed. If a crate has multiple binaries, the --bin - argument can selectively install only one of them, and if you'd rather - install examples the --example argument can be used as well. - - If the package is already installed, Cargo will reinstall it if the - installed version does not appear to be up-to-date. If any of the - following values change, then Cargo will reinstall the package: - - o The package version and source. - - o The set of binary names installed. - - o The chosen features. - - o The profile (--profile). - - o The target (--target). - - Installing with --path will always build and install, unless there are - conflicting binaries from another package. The --force flag may be used - to force Cargo to always reinstall the package. - - If the source is crates.io or --git then by default the crate will be - built in a temporary target directory. 
To avoid this, the target - directory can be specified by setting the CARGO_TARGET_DIR environment - variable to a relative path. In particular, this can be useful for - caching build artifacts on continuous integration systems. - - By default, the Cargo.lock file that is included with the package will - be ignored. This means that Cargo will recompute which versions of - dependencies to use, possibly using newer versions that have been - released since the package was published. The --locked flag can be used - to force Cargo to use the packaged Cargo.lock file if it is available. - This may be useful for ensuring reproducible builds, to use the exact - same set of dependencies that were available when the package was - published. It may also be useful if a newer version of a dependency is - published that no longer builds on your system, or has other problems. - The downside to using --locked is that you will not receive any fixes or - updates to any dependency. Note that Cargo did not start publishing - Cargo.lock files until version 1.37, which means packages published with - prior versions will not have a Cargo.lock file available. - -OPTIONS - Install Options - --vers version, --version version - Specify a version to install. This may be a version requirement - , - like ~1.2, to have Cargo select the newest version from the given - requirement. If the version does not have a requirement operator - (such as ^ or ~), then it must be in the form MAJOR.MINOR.PATCH, and - will install exactly that version; it is not treated as a caret - requirement like Cargo dependencies are. - - --git url - Git URL to install the specified crate from. - - --branch branch - Branch to use when installing from git. - - --tag tag - Tag to use when installing from git. - - --rev sha - Specific commit to use when installing from git. - - --path path - Filesystem path to local crate to install. - - --list - List all installed packages and their versions. - - -f, --force - Force overwriting existing crates or binaries. This can be used if a - package has installed a binary with the same name as another - package. This is also useful if something has changed on the system - that you want to rebuild with, such as a newer version of rustc. - - --no-track - By default, Cargo keeps track of the installed packages with a - metadata file stored in the installation root directory. This flag - tells Cargo not to use or create that file. With this flag, Cargo - will refuse to overwrite any existing files unless the --force flag - is used. This also disables Cargo's ability to protect against - multiple concurrent invocations of Cargo installing at the same - time. - - --bin name... - Install only the specified binary. - - --bins - Install all binaries. - - --example name... - Install only the specified example. - - --examples - Install all examples. - - --root dir - Directory to install packages into. - - --registry registry - Name of the registry to use. Registry names are defined in Cargo - config files - . If not - specified, the default registry is used, which is defined by the - registry.default config key which defaults to crates-io. - - --index index - The URL of the registry index to use. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. 
Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Compilation Options - --target triple - Install for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - a new temporary folder located in the temporary directory of the - platform. - - When using --path, by default it will use target directory in the - workspace of the local crate unless --target-dir is specified. - - --debug - Build with the dev profile instead the release profile. See also the - --profile option for choosing a specific profile by name. - - --profile name - Install with the given profile. See the the reference - for more - details on profiles. - - Manifest Options - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Miscellaneous Options - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . 
- - --message-format fmt - The output format for diagnostic messages. Can be specified multiple - times and consists of comma-separated values. Valid values: - - o human (default): Display in a human-readable text format. - Conflicts with short and json. - - o short: Emit shorter, human-readable text messages. Conflicts with - human and json. - - o json: Emit JSON messages to stdout. See the reference - - for more details. Conflicts with human and short. - - o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. Cannot be used with - human or short. - - o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON - messages contains embedded ANSI color codes for respecting - rustc's default color scheme. Cannot be used with human or short. - - o json-render-diagnostics: Instruct Cargo to not include rustc - diagnostics in in JSON messages printed, but instead Cargo itself - should render the JSON diagnostics coming from rustc. Cargo's own - JSON diagnostics and others coming from rustc are still emitted. - Cannot be used with human or short. - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Install or upgrade a package from crates.io: - - cargo install ripgrep - - 2. Install or reinstall the package in the current directory: - - cargo install --path . - - 3. View the list of installed packages: - - cargo install --list - -SEE ALSO - cargo(1), cargo-uninstall(1), cargo-search(1), cargo-publish(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-locate-project.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-locate-project.txt deleted file mode 100644 index 38cbe9630..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-locate-project.txt +++ /dev/null @@ -1,89 +0,0 @@ -CARGO-LOCATE-PROJECT(1) - -NAME - cargo-locate-project - Print a JSON representation of a Cargo.toml - file's location - -SYNOPSIS - cargo locate-project [options] - -DESCRIPTION - This command will print a JSON object to stdout with the full path to - the Cargo.toml manifest. - -OPTIONS - --workspace - Locate the Cargo.toml at the root of the workspace, as opposed to - the current workspace member. - - Display Options - --message-format fmt - The representation in which to print the project location. Valid - values: - - o json (default): JSON object with the path under the key "root". - - o plain: Just the path. - - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. 
Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Display the path to the manifest based on the current directory: - - cargo locate-project - -SEE ALSO - cargo(1), cargo-metadata(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-login.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-login.txt deleted file mode 100644 index 888a10100..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-login.txt +++ /dev/null @@ -1,89 +0,0 @@ -CARGO-LOGIN(1) - -NAME - cargo-login - Save an API token from the registry locally - -SYNOPSIS - cargo login [options] [token] - -DESCRIPTION - This command will save the API token to disk so that commands that - require authentication, such as cargo-publish(1), will be automatically - authenticated. The token is saved in $CARGO_HOME/credentials.toml. - CARGO_HOME defaults to .cargo in your home directory. - - If the token argument is not specified, it will be read from stdin. - - The API token for crates.io may be retrieved from - . - - Take care to keep the token secret, it should not be shared with anyone - else. - -OPTIONS - Login Options - --registry registry - Name of the registry to use. Registry names are defined in Cargo - config files - . If not - specified, the default registry is used, which is defined by the - registry.default config key which defaults to crates-io. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. 
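   For example, because the token is read from stdin when it is not
   given as an argument, a token kept in a local file (the file name
   below is only an illustration) can be supplied without leaving it in
   shell history:

       cargo login < my-token.txt
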
- -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Save the API token to disk: - - cargo login - -SEE ALSO - cargo(1), cargo-publish(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-metadata.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-metadata.txt deleted file mode 100644 index ddb0ee3ca..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-metadata.txt +++ /dev/null @@ -1,418 +0,0 @@ -CARGO-METADATA(1) - -NAME - cargo-metadata - Machine-readable metadata about the current package - -SYNOPSIS - cargo metadata [options] - -DESCRIPTION - Output JSON to stdout containing information about the workspace members - and resolved dependencies of the current package. - - It is recommended to include the --format-version flag to future-proof - your code to ensure the output is in the format you are expecting. - - See the cargo_metadata crate - for a Rust API for reading the metadata. - -OUTPUT FORMAT - The output has the following format: - - { - /* Array of all packages in the workspace. - It also includes all feature-enabled dependencies unless --no-deps is used. - */ - "packages": [ - { - /* The name of the package. */ - "name": "my-package", - /* The version of the package. */ - "version": "0.1.0", - /* The Package ID, a unique identifier for referring to the package. */ - "id": "my-package 0.1.0 (path+file:///path/to/my-package)", - /* The license value from the manifest, or null. */ - "license": "MIT/Apache-2.0", - /* The license-file value from the manifest, or null. */ - "license_file": "LICENSE", - /* The description value from the manifest, or null. */ - "description": "Package description.", - /* The source ID of the package. This represents where - a package is retrieved from. - This is null for path dependencies and workspace members. - For other dependencies, it is a string with the format: - - "registry+URL" for registry-based dependencies. - Example: "registry+https://github.com/rust-lang/crates.io-index" - - "git+URL" for git-based dependencies. - Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c" - */ - "source": null, - /* Array of dependencies declared in the package's manifest. */ - "dependencies": [ - { - /* The name of the dependency. */ - "name": "bitflags", - /* The source ID of the dependency. May be null, see - description for the package source. - */ - "source": "registry+https://github.com/rust-lang/crates.io-index", - /* The version requirement for the dependency. - Dependencies without a version requirement have a value of "*". - */ - "req": "^1.0", - /* The dependency kind. - "dev", "build", or null for a normal dependency. - */ - "kind": null, - /* If the dependency is renamed, this is the new name for - the dependency as a string. null if it is not renamed. - */ - "rename": null, - /* Boolean of whether or not this is an optional dependency. */ - "optional": false, - /* Boolean of whether or not default features are enabled. */ - "uses_default_features": true, - /* Array of features enabled. */ - "features": [], - /* The target platform for the dependency. - null if not a target dependency. - */ - "target": "cfg(windows)", - /* The file system path for a local path dependency. - not present if not a path dependency. 
- */ - "path": "/path/to/dep", - /* A string of the URL of the registry this dependency is from. - If not specified or null, the dependency is from the default - registry (crates.io). - */ - "registry": null - } - ], - /* Array of Cargo targets. */ - "targets": [ - { - /* Array of target kinds. - - lib targets list the `crate-type` values from the - manifest such as "lib", "rlib", "dylib", - "proc-macro", etc. (default ["lib"]) - - binary is ["bin"] - - example is ["example"] - - integration test is ["test"] - - benchmark is ["bench"] - - build script is ["custom-build"] - */ - "kind": [ - "bin" - ], - /* Array of crate types. - - lib and example libraries list the `crate-type` values - from the manifest such as "lib", "rlib", "dylib", - "proc-macro", etc. (default ["lib"]) - - all other target kinds are ["bin"] - */ - "crate_types": [ - "bin" - ], - /* The name of the target. */ - "name": "my-package", - /* Absolute path to the root source file of the target. */ - "src_path": "/path/to/my-package/src/main.rs", - /* The Rust edition of the target. - Defaults to the package edition. - */ - "edition": "2018", - /* Array of required features. - This property is not included if no required features are set. - */ - "required-features": ["feat1"], - /* Whether the target should be documented by `cargo doc`. */ - "doc": true, - /* Whether or not this target has doc tests enabled, and - the target is compatible with doc testing. - */ - "doctest": false, - /* Whether or not this target should be built and run with `--test` - */ - "test": true - } - ], - /* Set of features defined for the package. - Each feature maps to an array of features or dependencies it - enables. - */ - "features": { - "default": [ - "feat1" - ], - "feat1": [], - "feat2": [] - }, - /* Absolute path to this package's manifest. */ - "manifest_path": "/path/to/my-package/Cargo.toml", - /* Package metadata. - This is null if no metadata is specified. - */ - "metadata": { - "docs": { - "rs": { - "all-features": true - } - } - }, - /* List of registries to which this package may be published. - Publishing is unrestricted if null, and forbidden if an empty array. */ - "publish": [ - "crates-io" - ], - /* Array of authors from the manifest. - Empty array if no authors specified. - */ - "authors": [ - "Jane Doe " - ], - /* Array of categories from the manifest. */ - "categories": [ - "command-line-utilities" - ], - /* Optional string that is the default binary picked by cargo run. */ - "default_run": null, - /* Optional string that is the minimum supported rust version */ - "rust_version": "1.56", - /* Array of keywords from the manifest. */ - "keywords": [ - "cli" - ], - /* The readme value from the manifest or null if not specified. */ - "readme": "README.md", - /* The repository value from the manifest or null if not specified. */ - "repository": "https://github.com/rust-lang/cargo", - /* The homepage value from the manifest or null if not specified. */ - "homepage": "https://rust-lang.org", - /* The documentation value from the manifest or null if not specified. */ - "documentation": "https://doc.rust-lang.org/stable/std", - /* The default edition of the package. - Note that individual targets may have different editions. - */ - "edition": "2018", - /* Optional string that is the name of a native library the package - is linking to. - */ - "links": null, - } - ], - /* Array of members of the workspace. - Each entry is the Package ID for the package. 
- */ - "workspace_members": [ - "my-package 0.1.0 (path+file:///path/to/my-package)", - ], - // The resolved dependency graph for the entire workspace. The enabled - // features are based on the enabled features for the "current" package. - // Inactivated optional dependencies are not listed. - // - // This is null if --no-deps is specified. - // - // By default, this includes all dependencies for all target platforms. - // The `--filter-platform` flag may be used to narrow to a specific - // target triple. - "resolve": { - /* Array of nodes within the dependency graph. - Each node is a package. - */ - "nodes": [ - { - /* The Package ID of this node. */ - "id": "my-package 0.1.0 (path+file:///path/to/my-package)", - /* The dependencies of this package, an array of Package IDs. */ - "dependencies": [ - "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" - ], - /* The dependencies of this package. This is an alternative to - "dependencies" which contains additional information. In - particular, this handles renamed dependencies. - */ - "deps": [ - { - /* The name of the dependency's library target. - If this is a renamed dependency, this is the new - name. - */ - "name": "bitflags", - /* The Package ID of the dependency. */ - "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - /* Array of dependency kinds. Added in Cargo 1.40. */ - "dep_kinds": [ - { - /* The dependency kind. - "dev", "build", or null for a normal dependency. - */ - "kind": null, - /* The target platform for the dependency. - null if not a target dependency. - */ - "target": "cfg(windows)" - } - ] - } - ], - /* Array of features enabled on this package. */ - "features": [ - "default" - ] - } - ], - /* The root package of the workspace. - This is null if this is a virtual workspace. Otherwise it is - the Package ID of the root package. - */ - "root": "my-package 0.1.0 (path+file:///path/to/my-package)" - }, - /* The absolute path to the build directory where Cargo places its output. */ - "target_directory": "/path/to/my-package/target", - /* The version of the schema for this metadata structure. - This will be changed if incompatible changes are ever made. - */ - "version": 1, - /* The absolute path to the root of the workspace. */ - "workspace_root": "/path/to/my-package" - /* Workspace metadata. - This is null if no metadata is specified. */ - "metadata": { - "docs": { - "rs": { - "all-features": true - } - } - } - } - -OPTIONS - Output Options - --no-deps - Output information only about the workspace members and don't fetch - dependencies. - - --format-version version - Specify the version of the output format to use. Currently 1 is the - only possible value. - - --filter-platform triple - This filters the resolve output to only include dependencies for the - given target triple. Without this flag, the resolve includes all - targets. - - Note that the dependencies listed in the "packages" array still - includes all dependencies. Each package definition is intended to be - an unaltered reproduction of the information within Cargo.toml. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. 
This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Output JSON about the current package: - - cargo metadata --format-version=1 - -SEE ALSO - cargo(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-new.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-new.txt deleted file mode 100644 index 1cb22b57c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-new.txt +++ /dev/null @@ -1,109 +0,0 @@ -CARGO-NEW(1) - -NAME - cargo-new - Create a new Cargo package - -SYNOPSIS - cargo new [options] path - -DESCRIPTION - This command will create a new Cargo package in the given directory. 
- This includes a simple template with a Cargo.toml manifest, sample - source file, and a VCS ignore file. If the directory is not already in a - VCS repository, then a new repository is created (see --vcs below). - - See cargo-init(1) for a similar command which will create a new manifest - in an existing directory. - -OPTIONS - New Options - --bin - Create a package with a binary target (src/main.rs). This is the - default behavior. - - --lib - Create a package with a library target (src/lib.rs). - - --edition edition - Specify the Rust edition to use. Default is 2021. Possible values: - 2015, 2018, 2021 - - --name name - Set the package name. Defaults to the directory name. - - --vcs vcs - Initialize a new VCS repository for the given version control system - (git, hg, pijul, or fossil) or do not initialize any version control - at all (none). If not specified, defaults to git or the - configuration value cargo-new.vcs, or none if already inside a VCS - repository. - - --registry registry - This sets the publish field in Cargo.toml to the given registry name - which will restrict publishing only to that registry. - - Registry names are defined in Cargo config files - . If not - specified, the default registry defined by the registry.default - config key is used. If the default registry is not set and - --registry is not used, the publish field will not be set which - means that publishing will not be restricted. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Create a binary Cargo package in the given directory: - - cargo new foo - -SEE ALSO - cargo(1), cargo-init(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-owner.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-owner.txt deleted file mode 100644 index ac3b19bf7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-owner.txt +++ /dev/null @@ -1,124 +0,0 @@ -CARGO-OWNER(1) - -NAME - cargo-owner - Manage the owners of a crate on the registry - -SYNOPSIS - cargo owner [options] --add login [crate] - cargo owner [options] --remove login [crate] - cargo owner [options] --list [crate] - -DESCRIPTION - This command will modify the owners for a crate on the registry. 
Owners - of a crate can upload new versions and yank old versions. Non-team - owners can also modify the set of owners, so take care! - - This command requires you to be authenticated with either the --token - option or using cargo-login(1). - - If the crate name is not specified, it will use the package name from - the current directory. - - See the reference - - for more information about owners and publishing. - -OPTIONS - Owner Options - -a, --add login... - Invite the given user or team as an owner. - - -r, --remove login... - Remove the given user or team as an owner. - - -l, --list - List owners of a crate. - - --token token - API token to use when authenticating. This overrides the token - stored in the credentials file (which is created by cargo-login(1)). - - Cargo config - environment variables can be used to override the tokens stored in - the credentials file. The token for crates.io may be specified with - the CARGO_REGISTRY_TOKEN environment variable. Tokens for other - registries may be specified with environment variables of the form - CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry - in all capital letters. - - --index index - The URL of the registry index to use. - - --registry registry - Name of the registry to use. Registry names are defined in Cargo - config files - . If not - specified, the default registry is used, which is defined by the - registry.default config key which defaults to crates-io. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. List owners of a package: - - cargo owner --list foo - - 2. Invite an owner to a package: - - cargo owner --add username foo - - 3. 
Remove an owner from a package: - - cargo owner --remove username foo - -SEE ALSO - cargo(1), cargo-login(1), cargo-publish(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-package.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-package.txt deleted file mode 100644 index 91bc938b2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-package.txt +++ /dev/null @@ -1,249 +0,0 @@ -CARGO-PACKAGE(1) - -NAME - cargo-package - Assemble the local package into a distributable tarball - -SYNOPSIS - cargo package [options] - -DESCRIPTION - This command will create a distributable, compressed .crate file with - the source code of the package in the current directory. The resulting - file will be stored in the target/package directory. This performs the - following steps: - - 1. Load and check the current workspace, performing some basic checks. - o Path dependencies are not allowed unless they have a version key. - Cargo will ignore the path key for dependencies in published - packages. dev-dependencies do not have this restriction. - - 2. Create the compressed .crate file. - o The original Cargo.toml file is rewritten and normalized. - - o [patch], [replace], and [workspace] sections are removed from the - manifest. - - o Cargo.lock is automatically included if the package contains an - executable binary or example target. cargo-install(1) will use the - packaged lock file if the --locked flag is used. - - o A .cargo_vcs_info.json file is included that contains information - about the current VCS checkout hash if available (not included - with --allow-dirty). - - 3. Extract the .crate file and build it to verify it can build. - o This will rebuild your package from scratch to ensure that it can - be built from a pristine state. The --no-verify flag can be used - to skip this step. - - 4. Check that build scripts did not modify any source files. - - The list of files included can be controlled with the include and - exclude fields in the manifest. - - See the reference - for more - details about packaging and publishing. - - .cargo_vcs_info.json format - Will generate a .cargo_vcs_info.json in the following format - - { - "git": { - "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302" - }, - "path_in_vcs": "" - } - - path_in_vcs will be set to a repo-relative path for packages in - subdirectories of the version control repository. - -OPTIONS - Package Options - -l, --list - Print files included in a package without making one. - - --no-verify - Don't verify the contents by building them. - - --no-metadata - Ignore warnings about a lack of human-usable metadata (such as the - description or the license). - - --allow-dirty - Allow working directories with uncommitted VCS changes to be - packaged. - - Package Selection - By default, when no package selection options are given, the packages - selected depend on the selected manifest file (based on the current - working directory if --manifest-path is not given). If the manifest is - the root of a workspace then the workspaces default members are - selected, otherwise only the package defined by the manifest will be - selected. - - The default members of a workspace can be set explicitly with the - workspace.default-members key in the root manifest. If this is not set, - a virtual workspace will include all workspace members (equivalent to - passing --workspace), and a non-virtual workspace will include only the - root crate itself. 
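       As a quick sketch of the selection behavior described above, when run
       from the root of a workspace:

           cargo package                # package the workspace's default members
           cargo package --workspace    # package every workspace member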
- - -p spec..., --package spec... - Package only the specified packages. See cargo-pkgid(1) for the SPEC - format. This flag may be specified multiple times and supports - common Unix glob patterns like *, ? and []. However, to avoid your - shell accidentally expanding glob patterns before Cargo handles - them, you must use single quotes or double quotes around each - pattern. - - --workspace - Package all members in the workspace. - - --exclude SPEC... - Exclude the specified packages. Must be used in conjunction with the - --workspace flag. This flag may be specified multiple times and - supports common Unix glob patterns like *, ? and []. However, to - avoid your shell accidentally expanding glob patterns before Cargo - handles them, you must use single quotes or double quotes around - each pattern. - - Compilation Options - --target triple - Package for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . 
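       Taken together, the selection, verification, and offline flags above
       can be combined; a minimal sketch, where the quoted glob pattern
       util-* is only a placeholder for real member names:

           # package matching members without building them or touching the network
           cargo package -p 'util-*' --no-verify --offline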
- - Miscellaneous Options - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Create a compressed .crate file of the current package: - - cargo package - -SEE ALSO - cargo(1), cargo-publish(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-pkgid.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-pkgid.txt deleted file mode 100644 index 1f6cfa1e1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-pkgid.txt +++ /dev/null @@ -1,148 +0,0 @@ -CARGO-PKGID(1) - -NAME - cargo-pkgid - Print a fully qualified package specification - -SYNOPSIS - cargo pkgid [options] [spec] - -DESCRIPTION - Given a spec argument, print out the fully qualified package ID - specifier for a package or dependency in the current workspace. This - command will generate an error if spec is ambiguous as to which package - it refers to in the dependency graph. If no spec is given, then the - specifier for the local package is printed. - - This command requires that a lockfile is available and dependencies have - been fetched. - - A package specifier consists of a name, version, and source URL. You are - allowed to use partial specifiers to succinctly match a specific package - as long as it matches only one package. 
The format of a spec can be one - of the following: - - +-----------------+--------------------------------------------------+ - | SPEC Structure | Example SPEC | - +-----------------+--------------------------------------------------+ - | name | bitflags | - +-----------------+--------------------------------------------------+ - | name:version | bitflags:1.0.4 | - +-----------------+--------------------------------------------------+ - | url | https://github.com/rust-lang/cargo | - +-----------------+--------------------------------------------------+ - | url#version | https://github.com/rust-lang/cargo#0.33.0 | - +-----------------+--------------------------------------------------+ - | url#name | | - | | https://github.com/rust-lang/crates.io-index#bitflags | - +-----------------+--------------------------------------------------+ - | | | - | url#name:version | https://github.com/rust-lang/cargo#crates-io:0.21.0 | - +-----------------+--------------------------------------------------+ - -OPTIONS - Package Selection - -p spec, --package spec - Get the package ID for the given package instead of the current - package. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. 
- -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Retrieve package specification for foo package: - - cargo pkgid foo - - 2. Retrieve package specification for version 1.0.0 of foo: - - cargo pkgid foo:1.0.0 - - 3. Retrieve package specification for foo from crates.io: - - cargo pkgid https://github.com/rust-lang/crates.io-index#foo - - 4. Retrieve package specification for foo from a local package: - - cargo pkgid file:///path/to/local/package#foo - -SEE ALSO - cargo(1), cargo-generate-lockfile(1), cargo-metadata(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-publish.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-publish.txt deleted file mode 100644 index 5cd25c59d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-publish.txt +++ /dev/null @@ -1,216 +0,0 @@ -CARGO-PUBLISH(1) - -NAME - cargo-publish - Upload a package to the registry - -SYNOPSIS - cargo publish [options] - -DESCRIPTION - This command will create a distributable, compressed .crate file with - the source code of the package in the current directory and upload it to - a registry. The default registry is . This performs - the following steps: - - 1. Performs a few checks, including: - o Checks the package.publish key in the manifest for restrictions on - which registries you are allowed to publish to. - - 2. Create a .crate file by following the steps in cargo-package(1). - - 3. Upload the crate to the registry. Note that the server will perform - additional checks on the crate. - - This command requires you to be authenticated with either the --token - option or using cargo-login(1). - - See the reference - for more - details about packaging and publishing. - -OPTIONS - Publish Options - --dry-run - Perform all checks without uploading. - - --token token - API token to use when authenticating. This overrides the token - stored in the credentials file (which is created by cargo-login(1)). - - Cargo config - environment variables can be used to override the tokens stored in - the credentials file. The token for crates.io may be specified with - the CARGO_REGISTRY_TOKEN environment variable. Tokens for other - registries may be specified with environment variables of the form - CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry - in all capital letters. - - --no-verify - Don't verify the contents by building them. - - --allow-dirty - Allow working directories with uncommitted VCS changes to be - packaged. - - --index index - The URL of the registry index to use. - - --registry registry - Name of the registry to publish to. Registry names are defined in - Cargo config files - . If not - specified, and there is a package.publish - - field in Cargo.toml with a single registry, then it will publish to - that registry. Otherwise it will use the default registry, which is - defined by the registry.default - - config key which defaults to crates-io. - - Package Selection - By default, the package in the current working directory is selected. - The -p flag can be used to choose a different package in a workspace. - - -p spec, --package spec - The package to publish. See cargo-pkgid(1) for the SPEC format. - - Compilation Options - --target triple - Publish for the given architecture. The default is the host - architecture. The general format of the triple is - ---. 
Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Miscellaneous Options - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . 
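       Putting the registry and token options described earlier on this page
       together, a dry-run publish to an alternate registry might look like
       the following sketch (myregistry is a hypothetical registry name that
       must be defined in the Cargo config, and $TOKEN is assumed to hold the
       API token):

           cargo publish --registry myregistry --token "$TOKEN" --dry-run

           # or, using the per-registry environment variable form
           CARGO_REGISTRIES_MYREGISTRY_TOKEN="$TOKEN" cargo publish --registry myregistry --dry-run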
- - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Publish the current package: - - cargo publish - -SEE ALSO - cargo(1), cargo-package(1), cargo-login(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-report.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-report.txt deleted file mode 100644 index 489e45dcb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-report.txt +++ /dev/null @@ -1,34 +0,0 @@ -CARGO-REPORT(1) - -NAME - cargo-report - Generate and display various kinds of reports - -SYNOPSIS - cargo report type [options] - - DESCRIPTION - Displays a report of the given type - currently, only future-incompat is - supported - -OPTIONS - --id id - Show the report with the specified Cargo-generated id - - -p spec..., --package spec... - Only display a report for the specified package - -EXAMPLES - 1. Display the latest future-incompat report: - - cargo report future-incompat - - 2. Display the latest future-incompat report for a specific package: - - cargo report future-incompat --package my-dep:0.0.1 - -SEE ALSO - Future incompat report - - - cargo(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-run.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-run.txt deleted file mode 100644 index 032789dc7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-run.txt +++ /dev/null @@ -1,224 +0,0 @@ -CARGO-RUN(1) - -NAME - cargo-run - Run the current package - -SYNOPSIS - cargo run [options] [-- args] - -DESCRIPTION - Run a binary or example of the local package. - - All the arguments following the two dashes (--) are passed to the binary - to run. If you're passing arguments to both Cargo and the binary, the - ones after -- go to the binary, the ones before go to Cargo. - -OPTIONS - Package Selection - By default, the package in the current working directory is selected. - The -p flag can be used to choose a different package in a workspace. - - -p spec, --package spec - The package to run. See cargo-pkgid(1) for the SPEC format. - - Target Selection - When no target selection options are given, cargo run will run the - binary target. If there are multiple binary targets, you must pass a - target flag to choose one. Or, the default-run field may be specified in - the [package] section of Cargo.toml to choose the name of the binary to - run by default. - - --bin name - Run the specified binary. - - --example name - Run the specified example. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. 
Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Compilation Options - --target triple - Run for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - -r, --release - Run optimized artifacts with the release profile. See also the - --profile option for choosing a specific profile by name. - - --profile name - Run with the given profile. See the the reference - for more - details on profiles. - - --ignore-rust-version - Run the target even if the selected Rust compiler is older than the - required Rust version as configured in the project's rust-version - field. - - Output Options - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - --message-format fmt - The output format for diagnostic messages. Can be specified multiple - times and consists of comma-separated values. Valid values: - - o human (default): Display in a human-readable text format. - Conflicts with short and json. - - o short: Emit shorter, human-readable text messages. Conflicts with - human and json. - - o json: Emit JSON messages to stdout. See the reference - - for more details. Conflicts with human and short. - - o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. Cannot be used with - human or short. - - o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON - messages contains embedded ANSI color codes for respecting - rustc's default color scheme. Cannot be used with human or short. - - o json-render-diagnostics: Instruct Cargo to not include rustc - diagnostics in in JSON messages printed, but instead Cargo itself - should render the JSON diagnostics coming from rustc. Cargo's own - JSON diagnostics and others coming from rustc are still emitted. - Cannot be used with human or short. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. 
- - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - - Miscellaneous Options - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Build the local package and run its main target (assuming only one - binary): - - cargo run - - 2. Run an example with extra arguments: - - cargo run --example exname -- --exoption exarg1 exarg2 - -SEE ALSO - cargo(1), cargo-build(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-rustc.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-rustc.txt deleted file mode 100644 index 5ec4c0107..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-rustc.txt +++ /dev/null @@ -1,309 +0,0 @@ -CARGO-RUSTC(1) - -NAME - cargo-rustc - Compile the current package, and pass extra options to the - compiler - -SYNOPSIS - cargo rustc [options] [-- args] - -DESCRIPTION - The specified target for the current package (or package specified by -p - if provided) will be compiled along with all of its dependencies. The - specified args will all be passed to the final compiler invocation, not - any of the dependencies. Note that the compiler will still - unconditionally receive arguments such as -L, --extern, and - --crate-type, and the specified args will simply be added to the - compiler invocation. - - See for documentation on - rustc flags. - - This command requires that only one target is being compiled when - additional arguments are provided. If more than one target is available - for the current package the filters of --lib, --bin, etc, must be used - to select which target is compiled. - - To pass flags to all compiler processes spawned by Cargo, use the - RUSTFLAGS environment variable - - or the build.rustflags config value - . 
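       A minimal sketch of the distinction drawn above between arguments
       passed only to the final compiler invocation and flags applied to
       every rustc process (the codegen flag is just an example):

           # only the final rustc invocation for the current package sees the flag
           cargo rustc -- -C target-cpu=native

           # every rustc process spawned by Cargo sees the flag
           RUSTFLAGS="-C target-cpu=native" cargo build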
- -OPTIONS - Package Selection - By default, the package in the current working directory is selected. - The -p flag can be used to choose a different package in a workspace. - - -p spec, --package spec - The package to build. See cargo-pkgid(1) for the SPEC format. - - Target Selection - When no target selection options are given, cargo rustc will build all - binary and library targets of the selected package. - - Passing target selection flags will build only the specified targets. - - Note that --bin, --example, --test and --bench flags also support common - Unix glob patterns like *, ? and []. However, to avoid your shell - accidentally expanding glob patterns before Cargo handles them, you must - use single quotes or double quotes around each glob pattern. - - --lib - Build the package's library. - - --bin name... - Build the specified binary. This flag may be specified multiple - times and supports common Unix glob patterns. - - --bins - Build all binary targets. - - --example name... - Build the specified example. This flag may be specified multiple - times and supports common Unix glob patterns. - - --examples - Build all example targets. - - --test name... - Build the specified integration test. This flag may be specified - multiple times and supports common Unix glob patterns. - - --tests - Build all targets in test mode that have the test = true manifest - flag set. By default this includes the library and binaries built as - unittests, and integration tests. Be aware that this will also build - any required dependencies, so the lib target may be built twice - (once as a unittest, and once as a dependency for binaries, - integration tests, etc.). Targets may be enabled or disabled by - setting the test flag in the manifest settings for the target. - - --bench name... - Build the specified benchmark. This flag may be specified multiple - times and supports common Unix glob patterns. - - --benches - Build all targets in benchmark mode that have the bench = true - manifest flag set. By default this includes the library and binaries - built as benchmarks, and bench targets. Be aware that this will also - build any required dependencies, so the lib target may be built - twice (once as a benchmark, and once as a dependency for binaries, - benchmarks, etc.). Targets may be enabled or disabled by setting the - bench flag in the manifest settings for the target. - - --all-targets - Build all targets. This is equivalent to specifying --lib --bins - --tests --benches --examples. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Compilation Options - --target triple - Build for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . 
- - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - -r, --release - Build optimized artifacts with the release profile. See also the - --profile option for choosing a specific profile by name. - - --profile name - Build with the given profile. - - The rustc subcommand will treat the following named profiles with - special behaviors: - - o check โ€” Builds in the same way as the cargo-check(1) command - with the dev profile. - - o test โ€” Builds in the same way as the cargo-test(1) command, - enabling building in test mode which will enable tests and enable - the test cfg option. See rustc tests - for more - detail. - - o bench โ€” Builds in the same was as the cargo-bench(1) command, - similar to the test profile. - - See the the reference - for more - details on profiles. - - --ignore-rust-version - Build the target even if the selected Rust compiler is older than - the required Rust version as configured in the project's - rust-version field. - - Output Options - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - --message-format fmt - The output format for diagnostic messages. Can be specified multiple - times and consists of comma-separated values. Valid values: - - o human (default): Display in a human-readable text format. - Conflicts with short and json. - - o short: Emit shorter, human-readable text messages. Conflicts with - human and json. - - o json: Emit JSON messages to stdout. See the reference - - for more details. Conflicts with human and short. - - o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. Cannot be used with - human or short. - - o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON - messages contains embedded ANSI color codes for respecting - rustc's default color scheme. Cannot be used with human or short. - - o json-render-diagnostics: Instruct Cargo to not include rustc - diagnostics in in JSON messages printed, but instead Cargo itself - should render the JSON diagnostics coming from rustc. Cargo's own - JSON diagnostics and others coming from rustc are still emitted. - Cannot be used with human or short. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. 
The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - - Miscellaneous Options - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - - --future-incompat-report - Displays a future-incompat report for any future-incompatible - warnings produced during execution of this command - - See cargo-report(1) - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Check if your package (not including dependencies) uses unsafe code: - - cargo rustc --lib -- -D unsafe-code - - 2. Try an experimental flag on the nightly compiler, such as this which - prints the size of every type: - - cargo rustc --lib -- -Z print-type-sizes - -SEE ALSO - cargo(1), cargo-build(1), rustc(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-rustdoc.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-rustdoc.txt deleted file mode 100644 index 618a833e0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-rustdoc.txt +++ /dev/null @@ -1,290 +0,0 @@ -CARGO-RUSTDOC(1) - -NAME - cargo-rustdoc - Build a package's documentation, using specified custom - flags - -SYNOPSIS - cargo rustdoc [options] [-- args] - -DESCRIPTION - The specified target for the current package (or package specified by -p - if provided) will be documented with the specified args being passed to - the final rustdoc invocation. Dependencies will not be documented as - part of this command. Note that rustdoc will still unconditionally - receive arguments such as -L, --extern, and --crate-type, and the - specified args will simply be added to the rustdoc invocation. - - See for documentation on - rustdoc flags. - - This command requires that only one target is being compiled when - additional arguments are provided. If more than one target is available - for the current package the filters of --lib, --bin, etc, must be used - to select which target is compiled. 
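       For instance, in a package that contains both a library and binaries,
       a target filter chooses which one receives the extra rustdoc
       arguments; a sketch, where the binary name app is only a placeholder:

           # document a single binary target, passing an extra flag to rustdoc
           cargo rustdoc --bin app -- --document-private-items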
- - To pass flags to all rustdoc processes spawned by Cargo, use the - RUSTDOCFLAGS environment variable - - or the build.rustdocflags config value - . - -OPTIONS - Documentation Options - --open - Open the docs in a browser after building them. This will use your - default browser unless you define another one in the BROWSER - environment variable or use the doc.browser - - configuration option. - - Package Selection - By default, the package in the current working directory is selected. - The -p flag can be used to choose a different package in a workspace. - - -p spec, --package spec - The package to document. See cargo-pkgid(1) for the SPEC format. - - Target Selection - When no target selection options are given, cargo rustdoc will document - all binary and library targets of the selected package. The binary will - be skipped if its name is the same as the lib target. Binaries are - skipped if they have required-features that are missing. - - Passing target selection flags will document only the specified targets. - - Note that --bin, --example, --test and --bench flags also support common - Unix glob patterns like *, ? and []. However, to avoid your shell - accidentally expanding glob patterns before Cargo handles them, you must - use single quotes or double quotes around each glob pattern. - - --lib - Document the package's library. - - --bin name... - Document the specified binary. This flag may be specified multiple - times and supports common Unix glob patterns. - - --bins - Document all binary targets. - - --example name... - Document the specified example. This flag may be specified multiple - times and supports common Unix glob patterns. - - --examples - Document all example targets. - - --test name... - Document the specified integration test. This flag may be specified - multiple times and supports common Unix glob patterns. - - --tests - Document all targets in test mode that have the test = true manifest - flag set. By default this includes the library and binaries built as - unittests, and integration tests. Be aware that this will also build - any required dependencies, so the lib target may be built twice - (once as a unittest, and once as a dependency for binaries, - integration tests, etc.). Targets may be enabled or disabled by - setting the test flag in the manifest settings for the target. - - --bench name... - Document the specified benchmark. This flag may be specified - multiple times and supports common Unix glob patterns. - - --benches - Document all targets in benchmark mode that have the bench = true - manifest flag set. By default this includes the library and binaries - built as benchmarks, and bench targets. Be aware that this will also - build any required dependencies, so the lib target may be built - twice (once as a benchmark, and once as a dependency for binaries, - benchmarks, etc.). Targets may be enabled or disabled by setting the - bench flag in the manifest settings for the target. - - --all-targets - Document all targets. This is equivalent to specifying --lib --bins - --tests --benches --examples. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. 
This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Compilation Options - --target triple - Document for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - -r, --release - Document optimized artifacts with the release profile. See also the - --profile option for choosing a specific profile by name. - - --profile name - Document with the given profile. See the the reference - for more - details on profiles. - - --ignore-rust-version - Document the target even if the selected Rust compiler is older than - the required Rust version as configured in the project's - rust-version field. - - Output Options - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - --message-format fmt - The output format for diagnostic messages. Can be specified multiple - times and consists of comma-separated values. Valid values: - - o human (default): Display in a human-readable text format. - Conflicts with short and json. - - o short: Emit shorter, human-readable text messages. Conflicts with - human and json. - - o json: Emit JSON messages to stdout. See the reference - - for more details. Conflicts with human and short. - - o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. Cannot be used with - human or short. - - o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON - messages contains embedded ANSI color codes for respecting - rustc's default color scheme. Cannot be used with human or short. - - o json-render-diagnostics: Instruct Cargo to not include rustc - diagnostics in in JSON messages printed, but instead Cargo itself - should render the JSON diagnostics coming from rustc. Cargo's own - JSON diagnostics and others coming from rustc are still emitted. - Cannot be used with human or short. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. 
If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - - Miscellaneous Options - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Build documentation with custom CSS included from a given file: - - cargo rustdoc --lib -- --extend-css extra.css - -SEE ALSO - cargo(1), cargo-doc(1), rustdoc(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-search.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-search.txt deleted file mode 100644 index 2e2e8d22d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-search.txt +++ /dev/null @@ -1,86 +0,0 @@ -CARGO-SEARCH(1) - -NAME - cargo-search - Search packages in crates.io - -SYNOPSIS - cargo search [options] [query...] - -DESCRIPTION - This performs a textual search for crates on . The - matching crates will be displayed along with their description in TOML - format suitable for copying into a Cargo.toml manifest. - -OPTIONS - Search Options - --limit limit - Limit the number of results (default: 10, max: 100). - - --index index - The URL of the registry index to use. - - --registry registry - Name of the registry to use. Registry names are defined in Cargo - config files - . If not - specified, the default registry is used, which is defined by the - registry.default config key which defaults to crates-io. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. 
Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Search for a package from crates.io: - - cargo search serde - -SEE ALSO - cargo(1), cargo-install(1), cargo-publish(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-test.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-test.txt deleted file mode 100644 index a01674132..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-test.txt +++ /dev/null @@ -1,393 +0,0 @@ -CARGO-TEST(1) - -NAME - cargo-test - Execute unit and integration tests of a package - -SYNOPSIS - cargo test [options] [testname] [-- test-options] - -DESCRIPTION - Compile and execute unit and integration tests. - - The test filtering argument TESTNAME and all the arguments following the - two dashes (--) are passed to the test binaries and thus to libtest - (rustc's built in unit-test and micro-benchmarking framework). If you're - passing arguments to both Cargo and the binary, the ones after -- go to - the binary, the ones before go to Cargo. For details about libtest's - arguments see the output of cargo test -- --help and check out the rustc - book's chapter on how tests work at - . - - As an example, this will filter for tests with foo in their name and run - them on 3 threads in parallel: - - cargo test foo -- --test-threads 3 - - Tests are built with the --test option to rustc which creates an - executable with a main function that automatically runs all functions - annotated with the #[test] attribute in multiple threads. #[bench] - annotated functions will also be run with one iteration to verify that - they are functional. - - The libtest harness may be disabled by setting harness = false in the - target manifest settings, in which case your code will need to provide - its own main function to handle running tests. - - Documentation tests are also run by default, which is handled by - rustdoc. It extracts code samples from documentation comments and - executes them. See the rustdoc book - for more information on writing doc tests. - -OPTIONS - Test Options - --no-run - Compile, but don't run tests. - - --no-fail-fast - Run all tests regardless of failure. Without this flag, Cargo will - exit after the first executable fails. The Rust test harness will - run all tests within the executable to completion, this flag only - applies to the executable as a whole. - - Package Selection - By default, when no package selection options are given, the packages - selected depend on the selected manifest file (based on the current - working directory if --manifest-path is not given). 
If the manifest is - the root of a workspace then the workspaces default members are - selected, otherwise only the package defined by the manifest will be - selected. - - The default members of a workspace can be set explicitly with the - workspace.default-members key in the root manifest. If this is not set, - a virtual workspace will include all workspace members (equivalent to - passing --workspace), and a non-virtual workspace will include only the - root crate itself. - - -p spec..., --package spec... - Test only the specified packages. See cargo-pkgid(1) for the SPEC - format. This flag may be specified multiple times and supports - common Unix glob patterns like *, ? and []. However, to avoid your - shell accidentally expanding glob patterns before Cargo handles - them, you must use single quotes or double quotes around each - pattern. - - --workspace - Test all members in the workspace. - - --all - Deprecated alias for --workspace. - - --exclude SPEC... - Exclude the specified packages. Must be used in conjunction with the - --workspace flag. This flag may be specified multiple times and - supports common Unix glob patterns like *, ? and []. However, to - avoid your shell accidentally expanding glob patterns before Cargo - handles them, you must use single quotes or double quotes around - each pattern. - - Target Selection - When no target selection options are given, cargo test will build the - following targets of the selected packages: - - o lib โ€” used to link with binaries, examples, integration tests, and - doc tests - - o bins (only if integration tests are built and required features are - available) - - o examples โ€” to ensure they compile - - o lib as a unit test - - o bins as unit tests - - o integration tests - - o doc tests for the lib target - - The default behavior can be changed by setting the test flag for the - target in the manifest settings. Setting examples to test = true will - build and run the example as a test. Setting targets to test = false - will stop them from being tested by default. Target selection options - that take a target by name ignore the test flag and will always test the - given target. - - Doc tests for libraries may be disabled by setting doctest = false for - the library in the manifest. - - Binary targets are automatically built if there is an integration test - or benchmark. This allows an integration test to execute the binary to - exercise and test its behavior. The CARGO_BIN_EXE_ environment - variable - - is set when the integration test is built so that it can use the env - macro to locate the - executable. - - Passing target selection flags will test only the specified targets. - - Note that --bin, --example, --test and --bench flags also support common - Unix glob patterns like *, ? and []. However, to avoid your shell - accidentally expanding glob patterns before Cargo handles them, you must - use single quotes or double quotes around each glob pattern. - - --lib - Test the package's library. - - --bin name... - Test the specified binary. This flag may be specified multiple times - and supports common Unix glob patterns. - - --bins - Test all binary targets. - - --example name... - Test the specified example. This flag may be specified multiple - times and supports common Unix glob patterns. - - --examples - Test all example targets. - - --test name... - Test the specified integration test. This flag may be specified - multiple times and supports common Unix glob patterns. 
- - --tests - Test all targets in test mode that have the test = true manifest - flag set. By default this includes the library and binaries built as - unittests, and integration tests. Be aware that this will also build - any required dependencies, so the lib target may be built twice - (once as a unittest, and once as a dependency for binaries, - integration tests, etc.). Targets may be enabled or disabled by - setting the test flag in the manifest settings for the target. - - --bench name... - Test the specified benchmark. This flag may be specified multiple - times and supports common Unix glob patterns. - - --benches - Test all targets in benchmark mode that have the bench = true - manifest flag set. By default this includes the library and binaries - built as benchmarks, and bench targets. Be aware that this will also - build any required dependencies, so the lib target may be built - twice (once as a benchmark, and once as a dependency for binaries, - benchmarks, etc.). Targets may be enabled or disabled by setting the - bench flag in the manifest settings for the target. - - --all-targets - Test all targets. This is equivalent to specifying --lib --bins - --tests --benches --examples. - - --doc - Test only the library's documentation. This cannot be mixed with - other target options. - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Compilation Options - --target triple - Test for the given architecture. The default is the host - architecture. The general format of the triple is - ---. Run rustc --print target-list for - a list of supported targets. - - This may also be specified with the build.target config value - . - - Note that specifying this flag makes Cargo run in a different mode - where the target artifacts are placed in a separate directory. See - the build cache - - documentation for more details. - - -r, --release - Test optimized artifacts with the release profile. See also the - --profile option for choosing a specific profile by name. - - --profile name - Test with the given profile. See the the reference - for more - details on profiles. - - --ignore-rust-version - Test the target even if the selected Rust compiler is older than the - required Rust version as configured in the project's rust-version - field. - - Output Options - --target-dir directory - Directory for all generated artifacts and intermediate files. May - also be specified with the CARGO_TARGET_DIR environment variable, or - the build.target-dir config value - . Defaults to - target in the root of the workspace. - - Display Options - By default the Rust test harness hides output from test execution to - keep results readable. Test output can be recovered (e.g., for - debugging) by passing --nocapture to the test binaries: - - cargo test -- --nocapture - - -v, --verbose - Use verbose output. 
May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - --message-format fmt - The output format for diagnostic messages. Can be specified multiple - times and consists of comma-separated values. Valid values: - - o human (default): Display in a human-readable text format. - Conflicts with short and json. - - o short: Emit shorter, human-readable text messages. Conflicts with - human and json. - - o json: Emit JSON messages to stdout. See the reference - - for more details. Conflicts with human and short. - - o json-diagnostic-short: Ensure the rendered field of JSON messages - contains the "short" rendering from rustc. Cannot be used with - human or short. - - o json-diagnostic-rendered-ansi: Ensure the rendered field of JSON - messages contains embedded ANSI color codes for respecting - rustc's default color scheme. Cannot be used with human or short. - - o json-render-diagnostics: Instruct Cargo to not include rustc - diagnostics in in JSON messages printed, but instead Cargo itself - should render the JSON diagnostics coming from rustc. Cargo's own - JSON diagnostics and others coming from rustc are still emitted. - Cannot be used with human or short. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. 
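   [Editor's note — not part of the original man page. The +toolchain form
   described under Common Options composes with any cargo invocation;
   assuming rustup-managed "stable" and "nightly" toolchains are installed,
   the test suite could be run on a specific toolchain like so:

       cargo +nightly test
       cargo +stable test --release
   ]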
- - Miscellaneous Options - The --jobs argument affects the building of the test executable but does - not affect how many threads are used when running the tests. The Rust - test harness includes an option to control the number of threads used: - - cargo test -j 2 -- --test-threads=2 - - -j N, --jobs N - Number of parallel jobs to run. May also be specified with the - build.jobs config value - . Defaults to - the number of CPUs. - - --future-incompat-report - Displays a future-incompat report for any future-incompatible - warnings produced during execution of this command - - See cargo-report(1) - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Execute all the unit and integration tests of the current package: - - cargo test - - 2. Run only tests whose names match against a filter string: - - cargo test name_filter - - 3. Run only a specific test within a specific integration test: - - cargo test --test int_test_name -- modname::test_name - -SEE ALSO - cargo(1), cargo-bench(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-tree.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-tree.txt deleted file mode 100644 index c8d4c6b5e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-tree.txt +++ /dev/null @@ -1,351 +0,0 @@ -CARGO-TREE(1) - -NAME - cargo-tree - Display a tree visualization of a dependency graph - -SYNOPSIS - cargo tree [options] - -DESCRIPTION - This command will display a tree of dependencies to the terminal. An - example of a simple project that depends on the "rand" package: - - myproject v0.1.0 (/myproject) - โ””โ”€โ”€ rand v0.7.3 - โ”œโ”€โ”€ getrandom v0.1.14 - โ”‚ โ”œโ”€โ”€ cfg-if v0.1.10 - โ”‚ โ””โ”€โ”€ libc v0.2.68 - โ”œโ”€โ”€ libc v0.2.68 (*) - โ”œโ”€โ”€ rand_chacha v0.2.2 - โ”‚ โ”œโ”€โ”€ ppv-lite86 v0.2.6 - โ”‚ โ””โ”€โ”€ rand_core v0.5.1 - โ”‚ โ””โ”€โ”€ getrandom v0.1.14 (*) - โ””โ”€โ”€ rand_core v0.5.1 (*) - [build-dependencies] - โ””โ”€โ”€ cc v1.0.50 - - Packages marked with (*) have been "de-duplicated". The dependencies for - the package have already been shown elsewhere in the graph, and so are - not repeated. Use the --no-dedupe option to repeat the duplicates. - - The -e flag can be used to select the dependency kinds to display. The - "features" kind changes the output to display the features enabled by - each dependency. For example, cargo tree -e features: - - myproject v0.1.0 (/myproject) - โ””โ”€โ”€ log feature "serde" - โ””โ”€โ”€ log v0.4.8 - โ”œโ”€โ”€ serde v1.0.106 - โ””โ”€โ”€ cfg-if feature "default" - โ””โ”€โ”€ cfg-if v0.1.10 - - In this tree, myproject depends on log with the serde feature. log in - turn depends on cfg-if with "default" features. When using -e features - it can be helpful to use -i flag to show how the features flow into a - package. See the examples below for more detail. - -OPTIONS - Tree Options - -i spec, --invert spec - Show the reverse dependencies for the given package. This flag will - invert the tree and display the packages that depend on the given - package. - - Note that in a workspace, by default it will only display the - package's reverse dependencies inside the tree of the workspace - member in the current directory. The --workspace flag can be used to - extend it so that it will show the package's reverse dependencies - across the entire workspace. 
The -p flag can be used to display the - package's reverse dependencies only with the subtree of the package - given to -p. - - --prune spec - Prune the given package from the display of the dependency tree. - - --depth depth - Maximum display depth of the dependency tree. A depth of 1 displays - the direct dependencies, for example. - - --no-dedupe - Do not de-duplicate repeated dependencies. Usually, when a package - has already displayed its dependencies, further occurrences will not - re-display its dependencies, and will include a (*) to indicate it - has already been shown. This flag will cause those duplicates to be - repeated. - - -d, --duplicates - Show only dependencies which come in multiple versions (implies - --invert). When used with the -p flag, only shows duplicates within - the subtree of the given package. - - It can be beneficial for build times and executable sizes to avoid - building that same package multiple times. This flag can help - identify the offending packages. You can then investigate if the - package that depends on the duplicate with the older version can be - updated to the newer version so that only one instance is built. - - -e kinds, --edges kinds - The dependency kinds to display. Takes a comma separated list of - values: - - o all โ€” Show all edge kinds. - - o normal โ€” Show normal dependencies. - - o build โ€” Show build dependencies. - - o dev โ€” Show development dependencies. - - o features โ€” Show features enabled by each dependency. If this is - the only kind given, then it will automatically include the other - dependency kinds. - - o no-normal โ€” Do not include normal dependencies. - - o no-build โ€” Do not include build dependencies. - - o no-dev โ€” Do not include development dependencies. - - o no-proc-macro โ€” Do not include procedural macro dependencies. - - The normal, build, dev, and all dependency kinds cannot be mixed - with no-normal, no-build, or no-dev dependency kinds. - - The default is normal,build,dev. - - --target triple - Filter dependencies matching the given target-triple. The default is - the host platform. Use the value all to include all targets. - - Tree Formatting Options - --charset charset - Chooses the character set to use for the tree. Valid values are - "utf8" or "ascii". Default is "utf8". - - -f format, --format format - Set the format string for each package. The default is "{p}". - - This is an arbitrary string which will be used to display each - package. The following strings will be replaced with the - corresponding value: - - o {p} โ€” The package name. - - o {l} โ€” The package license. - - o {r} โ€” The package repository URL. - - o {f} โ€” Comma-separated list of package features that are - enabled. - - o {lib} โ€” The name, as used in a use statement, of the package's - library. - - --prefix prefix - Sets how each line is displayed. The prefix value can be one of: - - o indent (default) โ€” Shows each line indented as a tree. - - o depth โ€” Show as a list, with the numeric depth printed before - each entry. - - o none โ€” Show as a flat list. - - Package Selection - By default, when no package selection options are given, the packages - selected depend on the selected manifest file (based on the current - working directory if --manifest-path is not given). If the manifest is - the root of a workspace then the workspaces default members are - selected, otherwise only the package defined by the manifest will be - selected. 
- - The default members of a workspace can be set explicitly with the - workspace.default-members key in the root manifest. If this is not set, - a virtual workspace will include all workspace members (equivalent to - passing --workspace), and a non-virtual workspace will include only the - root crate itself. - - -p spec..., --package spec... - Display only the specified packages. See cargo-pkgid(1) for the SPEC - format. This flag may be specified multiple times and supports - common Unix glob patterns like *, ? and []. However, to avoid your - shell accidentally expanding glob patterns before Cargo handles - them, you must use single quotes or double quotes around each - pattern. - - --workspace - Display all members in the workspace. - - --exclude SPEC... - Exclude the specified packages. Must be used in conjunction with the - --workspace flag. This flag may be specified multiple times and - supports common Unix glob patterns like *, ? and []. However, to - avoid your shell accidentally expanding glob patterns before Cargo - handles them, you must use single quotes or double quotes around - each pattern. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Feature Selection - The feature flags allow you to control which features are enabled. When - no feature options are given, the default feature is activated for every - selected package. - - See the features documentation - - for more details. - - --features features - Space or comma separated list of features to activate. Features of - workspace members may be enabled with package-name/feature-name - syntax. This flag may be specified multiple times, which enables all - specified features. - - --all-features - Activate all available features of all selected packages. - - --no-default-features - Do not activate the default feature of the selected packages. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. 
Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Display the tree for the package in the current directory: - - cargo tree - - 2. Display all the packages that depend on the syn package: - - cargo tree -i syn - - 3. Show the features enabled on each package: - - cargo tree --format "{p} {f}" - - 4. Show all packages that are built multiple times. This can happen if - multiple semver-incompatible versions appear in the tree (like 1.0.0 - and 2.0.0). - - cargo tree -d - - 5. Explain why features are enabled for the syn package: - - cargo tree -e features -i syn - - The -e features flag is used to show features. The -i flag is used to - invert the graph so that it displays the packages that depend on syn. - An example of what this would display: - - syn v1.0.17 - โ”œโ”€โ”€ syn feature "clone-impls" - โ”‚ โ””โ”€โ”€ syn feature "default" - โ”‚ โ””โ”€โ”€ rustversion v1.0.2 - โ”‚ โ””โ”€โ”€ rustversion feature "default" - โ”‚ โ””โ”€โ”€ myproject v0.1.0 (/myproject) - โ”‚ โ””โ”€โ”€ myproject feature "default" (command-line) - โ”œโ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "derive" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "full" - โ”‚ โ””โ”€โ”€ rustversion v1.0.2 (*) - โ”œโ”€โ”€ syn feature "parsing" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "printing" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "proc-macro" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ””โ”€โ”€ syn feature "quote" - โ”œโ”€โ”€ syn feature "printing" (*) - โ””โ”€โ”€ syn feature "proc-macro" (*) - - To read this graph, you can follow the chain for each feature from - the root to see why it is included. For example, the "full" feature - is added by the rustversion crate which is included from myproject - (with the default features), and myproject is the package selected on - the command-line. All of the other syn features are added by the - "default" feature ("quote" is added by "printing" and "proc-macro", - both of which are default features). - - If you're having difficulty cross-referencing the de-duplicated (*) - entries, try with the --no-dedupe flag to get the full output. - -SEE ALSO - cargo(1), cargo-metadata(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-uninstall.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-uninstall.txt deleted file mode 100644 index 7e55fca19..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-uninstall.txt +++ /dev/null @@ -1,98 +0,0 @@ -CARGO-UNINSTALL(1) - -NAME - cargo-uninstall - Remove a Rust binary - -SYNOPSIS - cargo uninstall [options] [spec...] 
- -DESCRIPTION - This command removes a package installed with cargo-install(1). The spec - argument is a package ID specification of the package to remove (see - cargo-pkgid(1)). - - By default all binaries are removed for a crate but the --bin and - --example flags can be used to only remove particular binaries. - - The installation root is determined, in order of precedence: - - o --root option - - o CARGO_INSTALL_ROOT environment variable - - o install.root Cargo config value - - - o CARGO_HOME environment variable - - o $HOME/.cargo - -OPTIONS - Install Options - -p, --package spec... - Package to uninstall. - - --bin name... - Only uninstall the binary name. - - --root dir - Directory to uninstall packages from. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Uninstall a previously installed package. - - cargo uninstall ripgrep - -SEE ALSO - cargo(1), cargo-install(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-update.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-update.txt deleted file mode 100644 index e1de455bd..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-update.txt +++ /dev/null @@ -1,144 +0,0 @@ -CARGO-UPDATE(1) - -NAME - cargo-update - Update dependencies as recorded in the local lock file - -SYNOPSIS - cargo update [options] - -DESCRIPTION - This command will update dependencies in the Cargo.lock file to the - latest version. If the Cargo.lock file does not exist, it will be - created with the latest available versions. - -OPTIONS - Update Options - -p spec..., --package spec... - Update only the specified packages. This flag may be specified - multiple times. See cargo-pkgid(1) for the SPEC format. - - If packages are specified with the -p flag, then a conservative - update of the lockfile will be performed. This means that only the - dependency specified by SPEC will be updated. Its transitive - dependencies will be updated only if SPEC cannot be updated without - updating dependencies. All other dependencies will remain locked at - their currently recorded versions. - - If -p is not specified, all dependencies are updated. - - --aggressive - When used with -p, dependencies of spec are forced to update as - well. Cannot be used with --precise. 
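   [Editor's note — illustrative only, not part of the original
   cargo-update.txt. A conservative update of a single dependency, versus a
   forced update of that dependency together with its own dependencies,
   might look like this; "serde" is merely a placeholder package name:

       cargo update -p serde
       cargo update -p serde --aggressive
   ]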
- - --precise precise - When used with -p, allows you to specify a specific version number - to set the package to. If the package comes from a git repository, - this can be a git revision (such as a SHA hash or tag). - - -w, --workspace - Attempt to update only packages defined in the workspace. Other - packages are updated only if they don't already exist in the - lockfile. This option is useful for updating Cargo.lock after you've - changed version numbers in Cargo.toml. - - --dry-run - Displays what would be updated, but doesn't actually write the - lockfile. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Update all dependencies in the lockfile: - - cargo update - - 2. Update only specific dependencies: - - cargo update -p foo -p bar - - 3. 
Set a specific dependency to a specific version: - - cargo update -p foo --precise 1.2.3 - -SEE ALSO - cargo(1), cargo-generate-lockfile(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-vendor.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-vendor.txt deleted file mode 100644 index 6004360bc..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-vendor.txt +++ /dev/null @@ -1,139 +0,0 @@ -CARGO-VENDOR(1) - -NAME - cargo-vendor - Vendor all dependencies locally - -SYNOPSIS - cargo vendor [options] [path] - -DESCRIPTION - This cargo subcommand will vendor all crates.io and git dependencies for - a project into the specified directory at . After this command - completes the vendor directory specified by will contain all - remote sources from dependencies specified. Additional manifests beyond - the default one can be specified with the -s option. - - The cargo vendor command will also print out the configuration necessary - to use the vendored sources, which you will need to add to - .cargo/config.toml. - -OPTIONS - Vendor Options - -s manifest, --sync manifest - Specify extra Cargo.toml manifests to workspaces which should also - be vendored and synced to the output. - - --no-delete - Don't delete the "vendor" directory when vendoring, but rather keep - all existing contents of the vendor directory - - --respect-source-config - Instead of ignoring [source] configuration by default in - .cargo/config.toml read it and use it when downloading crates from - crates.io, for example - - --versioned-dirs - Normally versions are only added to disambiguate multiple versions - of the same package. This option causes all directories in the - "vendor" directory to be versioned, which makes it easier to track - the history of vendored packages over time, and can help with the - performance of re-vendoring when only a subset of the packages have - changed. - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . 
- - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Vendor all dependencies into a local "vendor" folder - - cargo vendor - - 2. Vendor all dependencies into a local "third-party/vendor" folder - - cargo vendor third-party/vendor - - 3. Vendor the current workspace as well as another to "vendor" - - cargo vendor -s ../path/to/Cargo.toml - -SEE ALSO - cargo(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-verify-project.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-verify-project.txt deleted file mode 100644 index 22e4e950e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-verify-project.txt +++ /dev/null @@ -1,109 +0,0 @@ -CARGO-VERIFY-PROJECT(1) - -NAME - cargo-verify-project - Check correctness of crate manifest - -SYNOPSIS - cargo verify-project [options] - -DESCRIPTION - This command will parse the local manifest and check its validity. It - emits a JSON object with the result. A successful validation will - display: - - {"success":"true"} - - An invalid workspace will display: - - {"invalid":"human-readable error message"} - -OPTIONS - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Manifest Options - --manifest-path path - Path to the Cargo.toml file. By default, Cargo searches for the - Cargo.toml file in the current directory or any parent directory. - - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. 
Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: The workspace is OK. - - o 1: The workspace is invalid. - -EXAMPLES - 1. Check the current workspace for errors: - - cargo verify-project - -SEE ALSO - cargo(1), cargo-package(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-version.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-version.txt deleted file mode 100644 index fc26df725..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-version.txt +++ /dev/null @@ -1,32 +0,0 @@ -CARGO-VERSION(1) - -NAME - cargo-version - Show version information - -SYNOPSIS - cargo version [options] - -DESCRIPTION - Displays the version of Cargo. - -OPTIONS - -v, --verbose - Display additional version information. - -EXAMPLES - 1. Display the version: - - cargo version - - 2. The version is also available via flags: - - cargo --version - cargo -V - - 3. Display extra version information: - - cargo -Vv - -SEE ALSO - cargo(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-yank.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-yank.txt deleted file mode 100644 index 3c85d09bb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo-yank.txt +++ /dev/null @@ -1,111 +0,0 @@ -CARGO-YANK(1) - -NAME - cargo-yank - Remove a pushed crate from the index - -SYNOPSIS - cargo yank [options] --vers version [crate] - -DESCRIPTION - The yank command removes a previously published crate's version from the - server's index. This command does not delete any data, and the crate - will still be available for download via the registry's download link. - - Note that existing crates locked to a yanked version will still be able - to download the yanked version to use it. Cargo will, however, not allow - any new crates to be locked to any yanked version. - - This command requires you to be authenticated with either the --token - option or using cargo-login(1). - - If the crate name is not specified, it will use the package name from - the current directory. - -OPTIONS - Yank Options - --vers version - The version to yank or un-yank. - - --undo - Undo a yank, putting a version back into the index. - - --token token - API token to use when authenticating. This overrides the token - stored in the credentials file (which is created by cargo-login(1)). 
- - Cargo config - environment variables can be used to override the tokens stored in - the credentials file. The token for crates.io may be specified with - the CARGO_REGISTRY_TOKEN environment variable. Tokens for other - registries may be specified with environment variables of the form - CARGO_REGISTRIES_NAME_TOKEN where NAME is the name of the registry - in all capital letters. - - --index index - The URL of the registry index to use. - - --registry registry - Name of the registry to use. Registry names are defined in Cargo - config files - . If not - specified, the default registry is used, which is defined by the - registry.default config key which defaults to crates-io. - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -EXAMPLES - 1. Yank a crate from the index: - - cargo yank --vers 1.0.7 foo - -SEE ALSO - cargo(1), cargo-login(1), cargo-publish(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo.txt b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo.txt deleted file mode 100644 index 91d9bcf78..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/generated_txt/cargo.txt +++ /dev/null @@ -1,273 +0,0 @@ -CARGO(1) - -NAME - cargo - The Rust package manager - -SYNOPSIS - cargo [options] command [args] - cargo [options] --version - cargo [options] --list - cargo [options] --help - cargo [options] --explain code - -DESCRIPTION - This program is a package manager and build tool for the Rust language, - available at . - -COMMANDS - Build Commands - cargo-bench(1) - ย ย ย ย Execute benchmarks of a package. - - cargo-build(1) - ย ย ย ย Compile a package. - - cargo-check(1) - ย ย ย ย Check a local package and all of its dependencies for errors. - - cargo-clean(1) - ย ย ย ย Remove artifacts that Cargo has generated in the past. - - cargo-doc(1) - ย ย ย ย Build a package's documentation. - - cargo-fetch(1) - ย ย ย ย Fetch dependencies of a package from the network. - - cargo-fix(1) - ย ย ย ย Automatically fix lint warnings reported by rustc. - - cargo-run(1) - ย ย ย ย Run a binary or example of the local package. - - cargo-rustc(1) - ย ย ย ย Compile a package, and pass extra options to the compiler. - - cargo-rustdoc(1) - ย ย ย ย Build a package's documentation, using specified custom flags. 
- - cargo-test(1) - ย ย ย ย Execute unit and integration tests of a package. - - Manifest Commands - cargo-generate-lockfile(1) - ย ย ย ย Generate Cargo.lock for a project. - - cargo-locate-project(1) - ย ย ย ย Print a JSON representation of a Cargo.toml file's location. - - cargo-metadata(1) - ย ย ย ย Output the resolved dependencies of a package in - machine-readable format. - - cargo-pkgid(1) - ย ย ย ย Print a fully qualified package specification. - - cargo-tree(1) - ย ย ย ย Display a tree visualization of a dependency graph. - - cargo-update(1) - ย ย ย ย Update dependencies as recorded in the local lock file. - - cargo-vendor(1) - ย ย ย ย Vendor all dependencies locally. - - cargo-verify-project(1) - ย ย ย ย Check correctness of crate manifest. - - Package Commands - cargo-init(1) - ย ย ย ย Create a new Cargo package in an existing directory. - - cargo-install(1) - ย ย ย ย Build and install a Rust binary. - - cargo-new(1) - ย ย ย ย Create a new Cargo package. - - cargo-search(1) - ย ย ย ย Search packages in crates.io. - - cargo-uninstall(1) - ย ย ย ย Remove a Rust binary. - - Publishing Commands - cargo-login(1) - ย ย ย ย Save an API token from the registry locally. - - cargo-owner(1) - ย ย ย ย Manage the owners of a crate on the registry. - - cargo-package(1) - ย ย ย ย Assemble the local package into a distributable tarball. - - cargo-publish(1) - ย ย ย ย Upload a package to the registry. - - cargo-yank(1) - ย ย ย ย Remove a pushed crate from the index. - - General Commands - cargo-help(1) - ย ย ย ย Display help information about Cargo. - - cargo-version(1) - ย ย ย ย Show version information. - -OPTIONS - Special Options - -V, --version - Print version info and exit. If used with --verbose, prints extra - information. - - --list - List all installed Cargo subcommands. If used with --verbose, prints - extra information. - - --explain code - Run rustc --explain CODE which will print out a detailed explanation - of an error message (for example, E0004). - - Display Options - -v, --verbose - Use verbose output. May be specified twice for "very verbose" output - which includes extra output such as dependency warnings and build - script output. May also be specified with the term.verbose config - value . - - -q, --quiet - Do not print cargo log messages. May also be specified with the - term.quiet config value - . - - --color when - Control when colored output is used. Valid values: - - o auto (default): Automatically detect if color support is - available on the terminal. - - o always: Always display colors. - - o never: Never display colors. - - May also be specified with the term.color config value - . - - Manifest Options - --frozen, --locked - Either of these flags requires that the Cargo.lock file is - up-to-date. If the lock file is missing, or it needs to be updated, - Cargo will exit with an error. The --frozen flag also prevents Cargo - from attempting to access the network to determine if it is - out-of-date. - - These may be used in environments where you want to assert that the - Cargo.lock file is up-to-date (such as a CI build) or want to avoid - network access. - - --offline - Prevents Cargo from accessing the network for any reason. Without - this flag, Cargo will stop with an error if it needs to access the - network and the network is not available. With this flag, Cargo will - attempt to proceed without the network if possible. - - Beware that this may result in different dependency resolution than - online mode. 
Cargo will restrict itself to crates that are - downloaded locally, even if there might be a newer version as - indicated in the local copy of the index. See the cargo-fetch(1) - command to download dependencies before going offline. - - May also be specified with the net.offline config value - . - - Common Options - +toolchain - If Cargo has been installed with rustup, and the first argument to - cargo begins with +, it will be interpreted as a rustup toolchain - name (such as +stable or +nightly). See the rustup documentation - for more - information about how toolchain overrides work. - - -h, --help - Prints help information. - - -Z flag - Unstable (nightly-only) flags to Cargo. Run cargo -Z help for - details. - -ENVIRONMENT - See the reference - - for details on environment variables that Cargo reads. - -EXIT STATUS - o 0: Cargo succeeded. - - o 101: Cargo failed to complete. - -FILES - ~/.cargo/ - ย ย ย ย Default location for Cargo's "home" directory where it stores - various files. The location can be changed with the CARGO_HOME - environment variable. - - $CARGO_HOME/bin/ - ย ย ย ย Binaries installed by cargo-install(1) will be located here. If - using rustup , executables - distributed with Rust are also located here. - - $CARGO_HOME/config.toml - ย ย ย ย The global configuration file. See the reference - for more - information about configuration files. - - .cargo/config.toml - ย ย ย ย Cargo automatically searches for a file named .cargo/config.toml - in the current directory, and all parent directories. These - configuration files will be merged with the global configuration file. - - $CARGO_HOME/credentials.toml - ย ย ย ย Private authentication information for logging in to a registry. - - $CARGO_HOME/registry/ - ย ย ย ย This directory contains cached downloads of the registry index - and any downloaded dependencies. - - $CARGO_HOME/git/ - ย ย ย ย This directory contains cached downloads of git dependencies. - - Please note that the internal structure of the $CARGO_HOME directory is - not stable yet and may be subject to change. - -EXAMPLES - 1. Build a local package and all of its dependencies: - - cargo build - - 2. Build a package with optimizations: - - cargo build --release - - 3. Run tests for a cross-compiled target: - - cargo test --target i686-unknown-linux-gnu - - 4. Create a new package that builds an executable: - - cargo new foobar - - 5. Create a package in the current directory: - - mkdir foo && cd foo - cargo init . - - 6. Learn about a command's options and usage: - - cargo help clean - -BUGS - See for issues. 
- -SEE ALSO - rustc(1), rustdoc(1) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/description-install-root.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/description-install-root.md deleted file mode 100644 index 50cf51bae..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/description-install-root.md +++ /dev/null @@ -1,7 +0,0 @@ -The installation root is determined, in order of precedence: - -- `--root` option -- `CARGO_INSTALL_ROOT` environment variable -- `install.root` Cargo [config value](../reference/config.html) -- `CARGO_HOME` environment variable -- `$HOME/.cargo` diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/description-one-target.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/description-one-target.md deleted file mode 100644 index 7af18131f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/description-one-target.md +++ /dev/null @@ -1,4 +0,0 @@ -This command requires that only one target is being compiled when additional -arguments are provided. If more than one target is available for the current -package the filters of `--lib`, `--bin`, etc, must be used to select which -target is compiled. diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-display.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-display.md deleted file mode 100644 index 917dac49c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-display.md +++ /dev/null @@ -1,24 +0,0 @@ -{{#option "`-v`" "`--verbose`"}} -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the `term.verbose` -[config value](../reference/config.html). -{{/option}} - -{{#option "`-q`" "`--quiet`"}} -Do not print cargo log messages. -May also be specified with the `term.quiet` -[config value](../reference/config.html). -{{/option}} - -{{#option "`--color` _when_"}} -Control when colored output is used. Valid values: - -- `auto` (default): Automatically detect if color support is available on the - terminal. -- `always`: Always display colors. -- `never`: Never display colors. - -May also be specified with the `term.color` -[config value](../reference/config.html). 
-{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-future-incompat.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-future-incompat.md deleted file mode 100644 index 3a8a1e7b7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-future-incompat.md +++ /dev/null @@ -1,6 +0,0 @@ -{{#option "`--future-incompat-report`"}} -Displays a future-incompat report for any future-incompatible warnings -produced during execution of this command - -See {{man "cargo-report" 1}} -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-ignore-rust-version.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-ignore-rust-version.md deleted file mode 100644 index a151534e9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-ignore-rust-version.md +++ /dev/null @@ -1,4 +0,0 @@ -{{#option "`--ignore-rust-version`"}} -{{actionverb}} the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's `rust-version` field. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-index.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-index.md deleted file mode 100644 index b19b98365..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-index.md +++ /dev/null @@ -1,3 +0,0 @@ -{{#option "`--index` _index_"}} -The URL of the registry index to use. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-jobs.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-jobs.md deleted file mode 100644 index 7dc00e3de..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-jobs.md +++ /dev/null @@ -1,5 +0,0 @@ -{{#option "`-j` _N_" "`--jobs` _N_"}} -Number of parallel jobs to run. May also be specified with the -`build.jobs` [config value](../reference/config.html). Defaults to -the number of CPUs. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-locked.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-locked.md deleted file mode 100644 index c9ac9524e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-locked.md +++ /dev/null @@ -1,25 +0,0 @@ -{{#option "`--frozen`" "`--locked`"}} -Either of these flags requires that the `Cargo.lock` file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The `--frozen` flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date. - -These may be used in environments where you want to assert that the -`Cargo.lock` file is up-to-date (such as a CI build) or want to avoid network -access. -{{/option}} - -{{#option "`--offline`"}} -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. - -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. 
-See the {{man "cargo-fetch" 1}} command to download dependencies before going -offline. - -May also be specified with the `net.offline` [config value](../reference/config.html). -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-manifest-path.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-manifest-path.md deleted file mode 100644 index b1d6eab76..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-manifest-path.md +++ /dev/null @@ -1,4 +0,0 @@ -{{#option "`--manifest-path` _path_" }} -Path to the `Cargo.toml` file. By default, Cargo searches for the -`Cargo.toml` file in the current directory or any parent directory. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-message-format.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-message-format.md deleted file mode 100644 index 0e798147a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-message-format.md +++ /dev/null @@ -1,21 +0,0 @@ -{{#option "`--message-format` _fmt_" }} -The output format for diagnostic messages. Can be specified multiple times -and consists of comma-separated values. Valid values: - -- `human` (default): Display in a human-readable text format. Conflicts with - `short` and `json`. -- `short`: Emit shorter, human-readable text messages. Conflicts with `human` - and `json`. -- `json`: Emit JSON messages to stdout. See - [the reference](../reference/external-tools.html#json-messages) - for more details. Conflicts with `human` and `short`. -- `json-diagnostic-short`: Ensure the `rendered` field of JSON messages contains - the "short" rendering from rustc. Cannot be used with `human` or `short`. -- `json-diagnostic-rendered-ansi`: Ensure the `rendered` field of JSON messages - contains embedded ANSI color codes for respecting rustc's default color - scheme. Cannot be used with `human` or `short`. -- `json-render-diagnostics`: Instruct Cargo to not include rustc diagnostics in - in JSON messages printed, but instead Cargo itself should render the - JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others - coming from rustc are still emitted. Cannot be used with `human` or `short`. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-new.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-new.md deleted file mode 100644 index e9792f05e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-new.md +++ /dev/null @@ -1,39 +0,0 @@ -{{#options}} - -{{#option "`--bin`" }} -Create a package with a binary target (`src/main.rs`). -This is the default behavior. -{{/option}} - -{{#option "`--lib`" }} -Create a package with a library target (`src/lib.rs`). -{{/option}} - -{{#option "`--edition` _edition_" }} -Specify the Rust edition to use. Default is 2021. -Possible values: 2015, 2018, 2021 -{{/option}} - -{{#option "`--name` _name_" }} -Set the package name. Defaults to the directory name. -{{/option}} - -{{#option "`--vcs` _vcs_" }} -Initialize a new VCS repository for the given version control system (git, -hg, pijul, or fossil) or do not initialize any version control at all -(none). If not specified, defaults to `git` or the configuration value -`cargo-new.vcs`, or `none` if already inside a VCS repository. 
-{{/option}} - -{{#option "`--registry` _registry_" }} -This sets the `publish` field in `Cargo.toml` to the given registry name -which will restrict publishing only to that registry. - -Registry names are defined in [Cargo config files](../reference/config.html). -If not specified, the default registry defined by the `registry.default` -config key is used. If the default registry is not set and `--registry` is not -used, the `publish` field will not be set which means that publishing will not -be restricted. -{{/option}} - -{{/options}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-profile-legacy-check.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-profile-legacy-check.md deleted file mode 100644 index 0ec82e693..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-profile-legacy-check.md +++ /dev/null @@ -1,10 +0,0 @@ -{{#option "`--profile` _name_" }} -{{actionverb}} with the given profile. - -As a special case, specifying the `test` profile will also enable checking in -test mode which will enable checking tests and enable the `test` cfg option. -See [rustc tests](https://doc.rust-lang.org/rustc/tests/index.html) for more -detail. - -See the [the reference](../reference/profiles.html) for more details on profiles. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-profile.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-profile.md deleted file mode 100644 index 2452e7b14..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-profile.md +++ /dev/null @@ -1,4 +0,0 @@ -{{#option "`--profile` _name_" }} -{{actionverb}} with the given profile. -See the [the reference](../reference/profiles.html) for more details on profiles. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-registry.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-registry.md deleted file mode 100644 index 23e170689..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-registry.md +++ /dev/null @@ -1,6 +0,0 @@ -{{#option "`--registry` _registry_"}} -Name of the registry to use. Registry names are defined in [Cargo config -files](../reference/config.html). If not specified, the default registry is used, -which is defined by the `registry.default` config key which defaults to -`crates-io`. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-release.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-release.md deleted file mode 100644 index 723dbba9f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-release.md +++ /dev/null @@ -1,4 +0,0 @@ -{{#option "`-r`" "`--release`"}} -{{actionverb}} optimized artifacts with the `release` profile. -See also the `--profile` option for choosing a specific profile by name. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-target-dir.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-target-dir.md deleted file mode 100644 index 3646e951f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-target-dir.md +++ /dev/null @@ -1,13 +0,0 @@ -{{#option "`--target-dir` _directory_"}} -Directory for all generated artifacts and intermediate files. 
May also be -specified with the `CARGO_TARGET_DIR` environment variable, or the -`build.target-dir` [config value](../reference/config.html). -{{#if temp-target-dir}} Defaults to a new temporary folder located in the -temporary directory of the platform. - -When using `--path`, by default it will use `target` directory in the workspace -of the local crate unless `--target-dir` -is specified. -{{else}} Defaults to `target` in the root of the workspace. -{{/if}} -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-target-triple.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-target-triple.md deleted file mode 100644 index 6ad03c6f4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-target-triple.md +++ /dev/null @@ -1,13 +0,0 @@ -{{#option "`--target` _triple_"}} -{{actionverb}} for the given architecture. The default is the host -architecture. The general format of the triple is -`---`. Run `rustc --print target-list` for a -list of supported targets. - -This may also be specified with the `build.target` -[config value](../reference/config.html). - -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -[build cache](../guide/build-cache.html) documentation for more details. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-targets-lib-bin.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-targets-lib-bin.md deleted file mode 100644 index 14342acfa..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-targets-lib-bin.md +++ /dev/null @@ -1,12 +0,0 @@ -{{#option "`--lib`" }} -{{actionverb}} the package's library. -{{/option}} - -{{#option "`--bin` _name_..." }} -{{actionverb}} the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns. -{{/option}} - -{{#option "`--bins`" }} -{{actionverb}} all binary targets. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-targets.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-targets.md deleted file mode 100644 index 3332001b0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-targets.md +++ /dev/null @@ -1,57 +0,0 @@ -Passing target selection flags will {{lower actionverb}} only the specified -targets. - -Note that `--bin`, `--example`, `--test` and `--bench` flags also -support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. - -{{#options}} - -{{> options-targets-lib-bin }} - -{{#option "`--example` _name_..." }} -{{actionverb}} the specified example. This flag may be specified multiple times -and supports common Unix glob patterns. -{{/option}} - -{{#option "`--examples`" }} -{{actionverb}} all example targets. -{{/option}} - -{{#option "`--test` _name_..." }} -{{actionverb}} the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns. -{{/option}} - -{{#option "`--tests`" }} -{{actionverb}} all targets in test mode that have the `test = true` manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. 
Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the `test` flag in the -manifest settings for the target. -{{/option}} - -{{#option "`--bench` _name_..." }} -{{actionverb}} the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns. -{{/option}} - -{{#option "`--benches`" }} -{{actionverb}} all targets in benchmark mode that have the `bench = true` -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the `bench` flag in the -manifest settings for the target. -{{/option}} - -{{#option "`--all-targets`" }} -{{actionverb}} all targets. This is equivalent to specifying `--lib --bins ---tests --benches --examples`. -{{/option}} - -{{/options}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-test.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-test.md deleted file mode 100644 index 1d2447e8d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-test.md +++ /dev/null @@ -1,14 +0,0 @@ -{{#options}} - -{{#option "`--no-run`" }} -Compile, but don't run {{nouns}}. -{{/option}} - -{{#option "`--no-fail-fast`" }} -Run all {{nouns}} regardless of failure. Without this flag, Cargo will exit -after the first executable fails. The Rust test harness will run all {{nouns}} -within the executable to completion, this flag only applies to the executable -as a whole. -{{/option}} - -{{/options}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-token.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-token.md deleted file mode 100644 index 855204de2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/options-token.md +++ /dev/null @@ -1,11 +0,0 @@ -{{#option "`--token` _token_" }} -API token to use when authenticating. This overrides the token stored in -the credentials file (which is created by {{man "cargo-login" 1}}). - -[Cargo config](../reference/config.html) environment variables can be -used to override the tokens stored in the credentials file. The token for -crates.io may be specified with the `CARGO_REGISTRY_TOKEN` environment -variable. Tokens for other registries may be specified with environment -variables of the form `CARGO_REGISTRIES_NAME_TOKEN` where `NAME` is the name -of the registry in all capital letters. -{{/option}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-environment.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-environment.md deleted file mode 100644 index aae5f078a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-environment.md +++ /dev/null @@ -1,4 +0,0 @@ -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. 
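As a concrete example of those environment variables, the registry token overrides described a little earlier might be supplied like this (the registry name, the token value, and the choice of `cargo publish` as the authenticating command are placeholders for illustration):

    # placeholder token and registry name
    CARGO_REGISTRY_TOKEN=abc123 cargo publish
    CARGO_REGISTRIES_MYREG_TOKEN=abc123 cargo publish --registry myreg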
diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-exit-status.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-exit-status.md deleted file mode 100644 index a8123366d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-exit-status.md +++ /dev/null @@ -1,4 +0,0 @@ -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-features.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-features.md deleted file mode 100644 index f4947f7f5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-features.md +++ /dev/null @@ -1,26 +0,0 @@ -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -{{#options}} - -{{#option "`--features` _features_" }} -Space or comma separated list of features to activate. Features of workspace -members may be enabled with `package-name/feature-name` syntax. This flag may -be specified multiple times, which enables all specified features. -{{/option}} - -{{#option "`--all-features`" }} -Activate all available features of all selected packages. -{{/option}} - -{{#option "`--no-default-features`" }} -Do not activate the `default` feature of the selected packages. -{{/option}} - -{{/options}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-options-common.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-options-common.md deleted file mode 100644 index 12a0a5bda..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-options-common.md +++ /dev/null @@ -1,21 +0,0 @@ -### Common Options - -{{#options}} - -{{#option "`+`_toolchain_"}} -If Cargo has been installed with rustup, and the first argument to `cargo` -begins with `+`, it will be interpreted as a rustup toolchain name (such -as `+stable` or `+nightly`). -See the [rustup documentation](https://rust-lang.github.io/rustup/overrides.html) -for more information about how toolchain overrides work. -{{/option}} - -{{#option "`-h`" "`--help`"}} -Prints help information. -{{/option}} - -{{#option "`-Z` _flag_"}} -Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for details. -{{/option}} - -{{/options}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-options-package.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-options-package.md deleted file mode 100644 index 4fa732dd3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-options-package.md +++ /dev/null @@ -1,13 +0,0 @@ -### Package Selection - -By default, the package in the current working directory is selected. The `-p` -flag can be used to choose a different package in a workspace. - -{{#options}} - -{{#option "`-p` _spec_" "`--package` _spec_" }} -The package to {{lower actionverb}}. See {{man "cargo-pkgid" 1}} for the SPEC -format. 
-{{/option}} - -{{/options}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-package-selection.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-package-selection.md deleted file mode 100644 index 8d7d62180..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/man/includes/section-package-selection.md +++ /dev/null @@ -1,42 +0,0 @@ -### Package Selection - -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -`--manifest-path` is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. - -The default members of a workspace can be set explicitly with the -`workspace.default-members` key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -`--workspace`), and a non-virtual workspace will include only the root crate itself. - -{{#options}} - -{{#option "`-p` _spec_..." "`--package` _spec_..."}} -{{actionverb}} only the specified packages. See {{man "cargo-pkgid" 1}} for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern. -{{/option}} - -{{#option "`--workspace`" }} -{{actionverb}} all members in the workspace. -{{/option}} - -{{#unless noall}} -{{#option "`--all`" }} -Deprecated alias for `--workspace`. -{{/option}} -{{/unless}} - -{{#option "`--exclude` _SPEC_..." }} -Exclude the specified packages. Must be used in conjunction with the -`--workspace` flag. This flag may be specified multiple times and supports -common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern. 
-{{/option}} - -{{/options}} diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/SUMMARY.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/SUMMARY.md deleted file mode 100644 index e5c584422..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/SUMMARY.md +++ /dev/null @@ -1,86 +0,0 @@ -# Summary - -[Introduction](index.md) - -* [Getting Started](getting-started/index.md) - * [Installation](getting-started/installation.md) - * [First Steps with Cargo](getting-started/first-steps.md) - -* [Cargo Guide](guide/index.md) - * [Why Cargo Exists](guide/why-cargo-exists.md) - * [Creating a New Package](guide/creating-a-new-project.md) - * [Working on an Existing Package](guide/working-on-an-existing-project.md) - * [Dependencies](guide/dependencies.md) - * [Package Layout](guide/project-layout.md) - * [Cargo.toml vs Cargo.lock](guide/cargo-toml-vs-cargo-lock.md) - * [Tests](guide/tests.md) - * [Continuous Integration](guide/continuous-integration.md) - * [Cargo Home](guide/cargo-home.md) - * [Build Cache](guide/build-cache.md) - -* [Cargo Reference](reference/index.md) - * [Specifying Dependencies](reference/specifying-dependencies.md) - * [Overriding Dependencies](reference/overriding-dependencies.md) - * [The Manifest Format](reference/manifest.md) - * [Cargo Targets](reference/cargo-targets.md) - * [Workspaces](reference/workspaces.md) - * [Features](reference/features.md) - * [Features Examples](reference/features-examples.md) - * [Profiles](reference/profiles.md) - * [Configuration](reference/config.md) - * [Environment Variables](reference/environment-variables.md) - * [Build Scripts](reference/build-scripts.md) - * [Build Script Examples](reference/build-script-examples.md) - * [Publishing on crates.io](reference/publishing.md) - * [Package ID Specifications](reference/pkgid-spec.md) - * [Source Replacement](reference/source-replacement.md) - * [External Tools](reference/external-tools.md) - * [Registries](reference/registries.md) - * [Dependency Resolution](reference/resolver.md) - * [SemVer Compatibility](reference/semver.md) - * [Future incompat report](reference/future-incompat-report.md) - * [Unstable Features](reference/unstable.md) - -* [Cargo Commands](commands/index.md) - * [General Commands](commands/general-commands.md) - * [cargo](commands/cargo.md) - * [cargo help](commands/cargo-help.md) - * [cargo version](commands/cargo-version.md) - * [Build Commands](commands/build-commands.md) - * [cargo bench](commands/cargo-bench.md) - * [cargo build](commands/cargo-build.md) - * [cargo check](commands/cargo-check.md) - * [cargo clean](commands/cargo-clean.md) - * [cargo doc](commands/cargo-doc.md) - * [cargo fetch](commands/cargo-fetch.md) - * [cargo fix](commands/cargo-fix.md) - * [cargo run](commands/cargo-run.md) - * [cargo rustc](commands/cargo-rustc.md) - * [cargo rustdoc](commands/cargo-rustdoc.md) - * [cargo test](commands/cargo-test.md) - * [cargo report](commands/cargo-report.md) - * [Manifest Commands](commands/manifest-commands.md) - * [cargo generate-lockfile](commands/cargo-generate-lockfile.md) - * [cargo locate-project](commands/cargo-locate-project.md) - * [cargo metadata](commands/cargo-metadata.md) - * [cargo pkgid](commands/cargo-pkgid.md) - * [cargo tree](commands/cargo-tree.md) - * [cargo update](commands/cargo-update.md) - * [cargo vendor](commands/cargo-vendor.md) - * [cargo verify-project](commands/cargo-verify-project.md) - * [Package Commands](commands/package-commands.md) - * [cargo init](commands/cargo-init.md) - * 
[cargo install](commands/cargo-install.md) - * [cargo new](commands/cargo-new.md) - * [cargo search](commands/cargo-search.md) - * [cargo uninstall](commands/cargo-uninstall.md) - * [Publishing Commands](commands/publishing-commands.md) - * [cargo login](commands/cargo-login.md) - * [cargo owner](commands/cargo-owner.md) - * [cargo package](commands/cargo-package.md) - * [cargo publish](commands/cargo-publish.md) - * [cargo yank](commands/cargo-yank.md) - -* [FAQ](faq.md) -* [Appendix: Glossary](appendix/glossary.md) -* [Appendix: Git Authentication](appendix/git-authentication.md) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/appendix/git-authentication.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/appendix/git-authentication.md deleted file mode 100644 index 7fe1f149a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/appendix/git-authentication.md +++ /dev/null @@ -1,66 +0,0 @@ -# Git Authentication - -Cargo supports some forms of authentication when using git dependencies and -registries. This appendix contains some information for setting up git -authentication in a way that works with Cargo. - -If you need other authentication methods, the [`net.git-fetch-with-cli`] -config value can be set to cause Cargo to execute the `git` executable to -handle fetching remote repositories instead of using the built-in support. -This can be enabled with the `CARGO_NET_GIT_FETCH_WITH_CLI=true` environment -variable. - -## HTTPS authentication - -HTTPS authentication requires the [`credential.helper`] mechanism. There are -multiple credential helpers, and you specify the one you want to use in your -global git configuration file. - -```ini -# ~/.gitconfig - -[credential] -helper = store -``` - -Cargo does not ask for passwords, so for most helpers you will need to give -the helper the initial username/password before running Cargo. One way to do -this is to run `git clone` of the private git repo and enter the -username/password. - -> **Tip:**
-> macOS users may want to consider using the osxkeychain helper.
-> Windows users may want to consider using the [GCM] helper. - -> **Note:** Windows users will need to make sure that the `sh` shell is -> available in your `PATH`. This typically is available with the Git for -> Windows installation. - -## SSH authentication - -SSH authentication requires `ssh-agent` to be running to acquire the SSH key. -Make sure the appropriate environment variables are set up (`SSH_AUTH_SOCK` on -most Unix-like systems), and that the correct keys are added (with `ssh-add`). - -Windows can use Pageant (part of [PuTTY]) or `ssh-agent`. -To use `ssh-agent`, Cargo needs to use the OpenSSH that is distributed as part -of Windows, as Cargo does not support the simulated Unix-domain sockets used -by MinGW or Cygwin. -More information about installing with Windows can be found at the [Microsoft -installation documentation] and the page on [key management] has instructions -on how to start `ssh-agent` and to add keys. - -> **Note:** Cargo does not support git's shorthand SSH URLs like -> `git@example.com:user/repo.git`. Use a full SSH URL like -> `ssh://git@example.com/user/repo.git`. - -> **Note:** SSH configuration files (like OpenSSH's `~/.ssh/config`) are not -> used by Cargo's built-in SSH library. More advanced requirements should use -> [`net.git-fetch-with-cli`]. - -[`credential.helper`]: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage -[`net.git-fetch-with-cli`]: ../reference/config.md#netgit-fetch-with-cli -[GCM]: https://github.com/microsoft/Git-Credential-Manager-Core/ -[PuTTY]: https://www.chiark.greenend.org.uk/~sgtatham/putty/ -[Microsoft installation documentation]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_install_firstuse -[key management]: https://docs.microsoft.com/en-us/windows-server/administration/openssh/openssh_keymanagement diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/appendix/glossary.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/appendix/glossary.md deleted file mode 100644 index 9ac0561e9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/appendix/glossary.md +++ /dev/null @@ -1,274 +0,0 @@ -# Glossary - -### Artifact - -An *artifact* is the file or set of files created as a result of the -compilation process. This includes linkable libraries, executable binaries, -and generated documentation. - -### Cargo - -*Cargo* is the Rust [*package manager*](#package-manager), and the primary -topic of this book. - -### Cargo.lock - -See [*lock file*](#lock-file). - -### Cargo.toml - -See [*manifest*](#manifest). - -### Crate - -A Rust *crate* is either a library or an executable program, referred to as -either a *library crate* or a *binary crate*, respectively. - -Every [target](#target) defined for a Cargo [package](#package) is a *crate*. - -Loosely, the term *crate* may refer to either the source code of the target or -to the compiled artifact that the target produces. It may also refer to a -compressed package fetched from a [registry](#registry). - -The source code for a given crate may be subdivided into [*modules*](#module). - -### Edition - -A *Rust edition* is a developmental landmark of the Rust language. The -[edition of a package][edition-field] is specified in the `Cargo.toml` -[manifest](#manifest), and individual targets can specify which edition they -use. See the [Edition Guide] for more information. 
- -### Feature - -The meaning of *feature* depends on the context: - -- A [*feature*][feature] is a named flag which allows for conditional - compilation. A feature can refer to an optional dependency, or an arbitrary - name defined in a `Cargo.toml` [manifest](#manifest) that can be checked - within source code. - -- Cargo has [*unstable feature flags*][cargo-unstable] which can be used to - enable experimental behavior of Cargo itself. - -- The Rust compiler and Rustdoc have their own unstable feature flags (see - [The Unstable Book][unstable-book] and [The Rustdoc - Book][rustdoc-unstable]). - -- CPU targets have [*target features*][target-feature] which specify - capabilities of a CPU. - -### Index - -The *index* is the searchable list of [*crates*](#crate) in a -[*registry*](#registry). - -### Lock file - -The `Cargo.lock` *lock file* is a file that captures the exact version of -every dependency used in a [*workspace*](#workspace) or -[*package*](#package). It is automatically generated by Cargo. See -[Cargo.toml vs Cargo.lock]. - -### Manifest - -A [*manifest*][manifest] is a description of a [package](#package) or a -[workspace](#workspace) in a file named `Cargo.toml`. - -A [*virtual manifest*][virtual] is a `Cargo.toml` file that only describes a -workspace, and does not include a package. - -### Member - -A *member* is a [*package*](#package) that belongs to a -[*workspace*](#workspace). - -### Module - -Rust's module system is used to organize code into logical units called -*modules*, which provide isolated namespaces within the code. - -The source code for a given [crate](#crate) may be subdivided into one or more -separate modules. This is usually done to organize the code into areas of -related functionality or to control the visible scope (public/private) of -symbols within the source (structs, functions, and so on). - -A [`Cargo.toml`](#manifest) file is primarily concerned with the -[package](#package) it defines, its crates, and the packages of the crates on -which they depend. Nevertheless, you will see the term "module" often when -working with Rust, so you should understand its relationship to a given crate. - -### Package - -A *package* is a collection of source files and a `Cargo.toml` -[*manifest*](#manifest) file which describes the package. A package has a name -and version which is used for specifying dependencies between packages. - -A package contains multiple [*targets*](#target), each of which is a -[*crate*](#crate). The `Cargo.toml` file describes the type of the crates -(binary or library) within the package, along with some metadata about each -one -- how each is to be built, what their direct dependencies are, etc., as -described throughout this book. - -The *package root* is the directory where the package's `Cargo.toml` manifest -is located. (Compare with [*workspace root*](#workspace).) - -The [*package ID specification*][pkgid-spec], or *SPEC*, is a string used to -uniquely reference a specific version of a package from a specific source. - -Small to medium sized Rust projects will only need a single package, though it -is common for them to have multiple crates. - -Larger projects may involve multiple packages, in which case Cargo -[*workspaces*](#workspace) can be used to manage common dependencies and other -related metadata between the packages. 
- -### Package manager - -Broadly speaking, a *package manager* is a program (or collection of related -programs) in a software ecosystem that automates the process of obtaining, -installing, and upgrading artifacts. Within a programming language ecosystem, -a package manager is a developer-focused tool whose primary functionality is -to download library artifacts and their dependencies from some central -repository; this capability is often combined with the ability to perform -software builds (by invoking the language-specific compiler). - -[*Cargo*](#cargo) is the package manager within the Rust ecosystem. Cargo -downloads your Rust [package](#package)โ€™s dependencies -([*artifacts*](#artifact) known as [*crates*](#crate)), compiles your -packages, makes distributable packages, and (optionally) uploads them to -[crates.io][], the Rust communityโ€™s [*package registry*](#registry). - -### Package registry - -See [*registry*](#registry). - -### Project - -Another name for a [package](#package). - -### Registry - -A *registry* is a service that contains a collection of downloadable -[*crates*](#crate) that can be installed or used as dependencies for a -[*package*](#package). The default registry in the Rust ecosystem is -[crates.io](https://crates.io). The registry has an [*index*](#index) which -contains a list of all crates, and tells Cargo how to download the crates that -are needed. - -### Source - -A *source* is a provider that contains [*crates*](#crate) that may be included -as dependencies for a [*package*](#package). There are several kinds of -sources: - -- **Registry source** โ€” See [registry](#registry). -- **Local registry source** โ€” A set of crates stored as compressed files on - the filesystem. See [Local Registry Sources]. -- **Directory source** โ€” A set of crates stored as uncompressed files on the - filesystem. See [Directory Sources]. -- **Path source** โ€” An individual package located on the filesystem (such as a - [path dependency]) or a set of multiple packages (such as [path overrides]). -- **Git source** โ€” Packages located in a git repository (such as a [git - dependency] or [git source]). - -See [Source Replacement] for more information. - -### Spec - -See [package ID specification](#package). - -### Target - -The meaning of the term *target* depends on the context: - -- **Cargo Target** โ€” Cargo [*packages*](#package) consist of *targets* which - correspond to [*artifacts*](#artifact) that will be produced. Packages can - have library, binary, example, test, and benchmark targets. The - [list of targets][targets] are configured in the `Cargo.toml` - [*manifest*](#manifest), often inferred automatically by the [directory - layout] of the source files. -- **Target Directory** โ€” Cargo places all built artifacts and intermediate - files in the *target* directory. By default this is a directory named - `target` at the [*workspace*](#workspace) root, or the package root if not - using a workspace. The directory may be changed with the `--target-dir` - command-line option, the `CARGO_TARGET_DIR` [environment variable], or the - `build.target-dir` [config option]. -- **Target Architecture** โ€” The OS and machine architecture for the built - artifacts are typically referred to as a *target*. -- **Target Triple** โ€” A triple is a specific format for specifying a target - architecture. 
Triples may be referred to as a *target triple* which is the - architecture for the artifact produced, and the *host triple* which is the - architecture that the compiler is running on. The target triple can be - specified with the `--target` command-line option or the `build.target` - [config option]. The general format of the triple is - `---` where: - - - `arch` = The base CPU architecture, for example `x86_64`, `i686`, `arm`, - `thumb`, `mips`, etc. - - `sub` = The CPU sub-architecture, for example `arm` has `v7`, `v7s`, - `v5te`, etc. - - `vendor` = The vendor, for example `unknown`, `apple`, `pc`, `nvidia`, etc. - - `sys` = The system name, for example `linux`, `windows`, `darwin`, etc. - `none` is typically used for bare-metal without an OS. - - `abi` = The ABI, for example `gnu`, `android`, `eabi`, etc. - - Some parameters may be omitted. Run `rustc --print target-list` for a list of - supported targets. - -### Test Targets - -Cargo *test targets* generate binaries which help verify proper operation and -correctness of code. There are two types of test artifacts: - -* **Unit test** โ€” A *unit test* is an executable binary compiled directly from - a library or a binary target. It contains the entire contents of the library - or binary code, and runs `#[test]` annotated functions, intended to verify - individual units of code. -* **Integration test target** โ€” An [*integration test - target*][integration-tests] is an executable binary compiled from a *test - target* which is a distinct [*crate*](#crate) whose source is located in the - `tests` directory or specified by the [`[[test]]` table][targets] in the - `Cargo.toml` [*manifest*](#manifest). It is intended to only test the public - API of a library, or execute a binary to verify its operation. - -### Workspace - -A [*workspace*][workspace] is a collection of one or more -[*packages*](#package) that share common dependency resolution (with a shared -`Cargo.lock` [*lock file*](#lock-file)), output directory, and various -settings such as profiles. - -A [*virtual workspace*][virtual] is a workspace where the root `Cargo.toml` -[*manifest*](#manifest) does not define a package, and only lists the -workspace [*members*](#member). - -The *workspace root* is the directory where the workspace's `Cargo.toml` -manifest is located. (Compare with [*package root*](#package).) 
- - -[Cargo.toml vs Cargo.lock]: ../guide/cargo-toml-vs-cargo-lock.md -[Directory Sources]: ../reference/source-replacement.md#directory-sources -[Local Registry Sources]: ../reference/source-replacement.md#local-registry-sources -[Source Replacement]: ../reference/source-replacement.md -[cargo-unstable]: ../reference/unstable.md -[config option]: ../reference/config.md -[crates.io]: https://crates.io/ -[directory layout]: ../guide/project-layout.md -[edition guide]: ../../edition-guide/index.html -[edition-field]: ../reference/manifest.md#the-edition-field -[environment variable]: ../reference/environment-variables.md -[feature]: ../reference/features.md -[git dependency]: ../reference/specifying-dependencies.md#specifying-dependencies-from-git-repositories -[git source]: ../reference/source-replacement.md -[integration-tests]: ../reference/cargo-targets.md#integration-tests -[manifest]: ../reference/manifest.md -[path dependency]: ../reference/specifying-dependencies.md#specifying-path-dependencies -[path overrides]: ../reference/overriding-dependencies.md#paths-overrides -[pkgid-spec]: ../reference/pkgid-spec.md -[rustdoc-unstable]: https://doc.rust-lang.org/nightly/rustdoc/unstable-features.html -[target-feature]: ../../reference/attributes/codegen.html#the-target_feature-attribute -[targets]: ../reference/cargo-targets.md#configuring-a-target -[unstable-book]: https://doc.rust-lang.org/nightly/unstable-book/index.html -[virtual]: ../reference/workspaces.md -[workspace]: ../reference/workspaces.md diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/build-commands.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/build-commands.md deleted file mode 100644 index 5cc6fff96..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/build-commands.md +++ /dev/null @@ -1,13 +0,0 @@ -# Build Commands -* [cargo bench](cargo-bench.md) -* [cargo build](cargo-build.md) -* [cargo check](cargo-check.md) -* [cargo clean](cargo-clean.md) -* [cargo doc](cargo-doc.md) -* [cargo fetch](cargo-fetch.md) -* [cargo fix](cargo-fix.md) -* [cargo run](cargo-run.md) -* [cargo rustc](cargo-rustc.md) -* [cargo rustdoc](cargo-rustdoc.md) -* [cargo test](cargo-test.md) -* [cargo report](cargo-report.md) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-bench.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-bench.md deleted file mode 100644 index 88cbe5d62..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-bench.md +++ /dev/null @@ -1,444 +0,0 @@ -# cargo-bench(1) - - - -## NAME - -cargo-bench - Execute benchmarks of a package - -## SYNOPSIS - -`cargo bench` [_options_] [_benchname_] [`--` _bench-options_] - -## DESCRIPTION - -Compile and execute benchmarks. - -The benchmark filtering argument _benchname_ and all the arguments following -the two dashes (`--`) are passed to the benchmark binaries and thus to -_libtest_ (rustc's built in unit-test and micro-benchmarking framework). If -you are passing arguments to both Cargo and the binary, the ones after `--` go -to the binary, the ones before go to Cargo. For details about libtest's -arguments see the output of `cargo bench -- --help` and check out the rustc -book's chapter on how tests work at -. 
- -As an example, this will run only the benchmark named `foo` (and skip other -similarly named benchmarks like `foobar`): - - cargo bench -- foo --exact - -Benchmarks are built with the `--test` option to `rustc` which creates an -executable with a `main` function that automatically runs all functions -annotated with the `#[bench]` attribute. Cargo passes the `--bench` flag to -the test harness to tell it to run only benchmarks. - -The libtest harness may be disabled by setting `harness = false` in the target -manifest settings, in which case your code will need to provide its own `main` -function to handle running benchmarks. - -> **Note**: The -> [`#[bench]` attribute](https://doc.rust-lang.org/nightly/unstable-book/library-features/test.html) -> is currently unstable and only available on the -> [nightly channel](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html). -> There are some packages available on -> [crates.io](https://crates.io/keywords/benchmark) that may help with -> running benchmarks on the stable channel, such as -> [Criterion](https://crates.io/crates/criterion). - -By default, `cargo bench` uses the [`bench` profile], which enables -optimizations and disables debugging information. If you need to debug a -benchmark, you can use the `--profile=dev` command-line option to switch to -the dev profile. You can then run the debug-enabled benchmark within a -debugger. - -[`bench` profile]: ../reference/profiles.html#bench - -## OPTIONS - -### Benchmark Options - -

- -
--no-run
-
Compile, but don't run benchmarks.
- - -
--no-fail-fast
-
Run all benchmarks regardless of failure. Without this flag, Cargo will exit -after the first executable fails. The Rust test harness will run all benchmarks -within the executable to completion; this flag only applies to the executable -as a whole.
- - -
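For instance, the two benchmark options above might be combined in separate runs (illustrative invocations only):

    # compile the benchmark executables without running them
    cargo bench --no-run
    # keep running the remaining benchmark executables even if one of them fails
    cargo bench --no-fail-fast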
- - -### Package Selection - -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -`--manifest-path` is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. - -The default members of a workspace can be set explicitly with the -`workspace.default-members` key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -`--workspace`), and a non-virtual workspace will include only the root crate itself. - -
- -
-p spec...
-
--package spec...
-
Benchmark only the specified packages. See cargo-pkgid(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like *, ? and []. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern.
- - -
--workspace
-
Benchmark all members in the workspace.
- - - -
--all
-
Deprecated alias for --workspace.
- - - -
--exclude SPEC...
-
Exclude the specified packages. Must be used in conjunction with the ---workspace flag. This flag may be specified multiple times and supports -common Unix glob patterns like *, ? and []. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern.
- - -
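A sketch of how these package selection flags compose (the package names are hypothetical); note the quotes that keep the shell from expanding the glob patterns:

    cargo bench --workspace --exclude 'experimental-*'
    cargo bench -p 'codec-*' -p core-utils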
- - -### Target Selection - -When no target selection options are given, `cargo bench` will build the -following targets of the selected packages: - -- lib โ€” used to link with binaries and benchmarks -- bins (only if benchmark targets are built and required features are - available) -- lib as a benchmark -- bins as benchmarks -- benchmark targets - -The default behavior can be changed by setting the `bench` flag for the target -in the manifest settings. Setting examples to `bench = true` will build and -run the example as a benchmark. Setting targets to `bench = false` will stop -them from being benchmarked by default. Target selection options that take a -target by name ignore the `bench` flag and will always benchmark the given -target. - -Passing target selection flags will benchmark only the specified -targets. - -Note that `--bin`, `--example`, `--test` and `--bench` flags also -support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. - -
- -
--lib
-
Benchmark the package's library.
- - -
--bin name...
-
Benchmark the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--bins
-
Benchmark all binary targets.
- - - -
--example name...
-
Benchmark the specified example. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--examples
-
Benchmark all example targets.
- - -
--test name...
-
Benchmark the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns.
- - -
--tests
-
Benchmark all targets in test mode that have the test = true manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the test flag in the -manifest settings for the target.
- - -
--bench name...
-
Benchmark the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns.
- - -
--benches
-
Benchmark all targets in benchmark mode that have the bench = true -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the bench flag in the -manifest settings for the target.
- - -
--all-targets
-
Benchmark all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
- - -
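A couple of sketched invocations of the target selection flags above (the target names are made up):

    # run only benchmark targets whose names match the quoted glob
    cargo bench --bench 'parse_*'
    # benchmark the library and all binary targets
    cargo bench --lib --bins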
- - -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
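For example, features can be activated with either the space/comma-separated list or the `package-name/feature-name` syntax described above (the feature and package names are placeholders):

    cargo bench --features 'foo,bar'
    cargo bench --no-default-features --features 'my-crate/simd'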
- - -### Compilation Options - -
- -
--target triple
-
Benchmark for the given architecture. The default is the host -architecture. The general format of the triple is -<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a -list of supported targets.

-

This may also be specified with the build.target -config value.

-

Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -build cache documentation for more details.

- - - -
--profile name
-
Benchmark with the given profile. -See the reference for more details on profiles.
- - - -
--ignore-rust-version
-
Benchmark the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's rust-version field.
- - - -
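For illustration, the compilation options above might be used as follows (the triple is one of the values printed by `rustc --print target-list`):

    # benchmark for an explicit target triple
    cargo bench --target x86_64-unknown-linux-gnu
    # build benchmarks with the dev profile, e.g. to run them under a debugger
    cargo bench --profile=dev --no-run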
- -### Output Options - -
-
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - -
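Either form described above can redirect the artifact directory (the path is a placeholder):

    cargo bench --target-dir /tmp/bench-target
    CARGO_TARGET_DIR=/tmp/bench-target cargo bench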
- -### Display Options - -By default the Rust test harness hides output from benchmark execution to keep -results readable. Benchmark output can be recovered (e.g., for debugging) by -passing `--nocapture` to the benchmark binaries: - - cargo bench -- --nocapture - -
- -
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - - -
--message-format fmt
-
The output format for diagnostic messages. Can be specified multiple times -and consists of comma-separated values. Valid values:

-
    -
  • human (default): Display in a human-readable text format. Conflicts with -short and json.
  • -
  • short: Emit shorter, human-readable text messages. Conflicts with human -and json.
  • -
  • json: Emit JSON messages to stdout. See -the reference -for more details. Conflicts with human and short.
  • -
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc. Cannot be used with human or short.
  • -
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with human or short.
  • -
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with human or short.
  • -
- - - -
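A couple of sketched uses of the display options above:

    # suppress Cargo's own log messages and disable color
    cargo bench --quiet --color never
    # use the shorter human-readable diagnostic format
    cargo bench --message-format short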
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - -
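For instance, a CI job that wants to assert the lock file is current and avoid the network might run (the manifest path is a placeholder):

    cargo bench --locked --offline
    cargo bench --frozen --manifest-path crates/foo/Cargo.toml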
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -### Miscellaneous Options - -The `--jobs` argument affects the building of the benchmark executable but -does not affect how many threads are used when running the benchmarks. The -Rust test harness runs benchmarks serially in a single thread. - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
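As a final sketch for this command, the toolchain shorthand and the job count described above combine like this:

    # build the benchmark executables with 4 parallel jobs on the nightly toolchain
    cargo +nightly bench -j 4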
- -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Build and execute all the benchmarks of the current package: - - cargo bench - -2. Run only a specific benchmark within a specific benchmark target: - - cargo bench --bench bench_name -- modname::some_benchmark - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-test(1)](cargo-test.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-build.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-build.md deleted file mode 100644 index 22b96acbe..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-build.md +++ /dev/null @@ -1,391 +0,0 @@ -# cargo-build(1) - - -## NAME - -cargo-build - Compile the current package - -## SYNOPSIS - -`cargo build` [_options_] - -## DESCRIPTION - -Compile local packages and all of their dependencies. - -## OPTIONS - -### Package Selection - -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -`--manifest-path` is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. - -The default members of a workspace can be set explicitly with the -`workspace.default-members` key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -`--workspace`), and a non-virtual workspace will include only the root crate itself. - -
- -
-p spec...
-
--package spec...
-
Build only the specified packages. See cargo-pkgid(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like *, ? and []. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern.
- - -
--workspace
-
Build all members in the workspace.
- - - -
--all
-
Deprecated alias for --workspace.
- - - -
--exclude SPEC...
-
Exclude the specified packages. Must be used in conjunction with the ---workspace flag. This flag may be specified multiple times and supports -common Unix glob patterns like *, ? and []. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern.
- - -
- - -### Target Selection - -When no target selection options are given, `cargo build` will build all -binary and library targets of the selected packages. Binaries are skipped if -they have `required-features` that are missing. - -Passing target selection flags will build only the specified -targets. - -Note that `--bin`, `--example`, `--test` and `--bench` flags also -support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. - -
- -
--lib
-
Build the package's library.
- - -
--bin name...
-
Build the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--bins
-
Build all binary targets.
- - - -
--example name...
-
Build the specified example. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--examples
-
Build all example targets.
- - -
--test name...
-
Build the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns.
- - -
--tests
-
Build all targets in test mode that have the test = true manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the test flag in the -manifest settings for the target.
- - -
--bench name...
-
Build the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns.
- - -
--benches
-
Build all targets in benchmark mode that have the bench = true -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the bench flag in the -manifest settings for the target.
- - -
--all-targets
-
Build all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
- - -
- - -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Compilation Options - -
- -
--target triple
-
Build for the given architecture. The default is the host -architecture. The general format of the triple is -<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a -list of supported targets.

-

This may also be specified with the build.target -config value.

-

Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -build cache documentation for more details.

- - - -
-r
-
--release
-
Build optimized artifacts with the release profile. -See also the --profile option for choosing a specific profile by name.
- - - -
--profile name
-
Build with the given profile. -See the reference for more details on profiles.
- - - -
--ignore-rust-version
-
Build the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's rust-version field.
- - - -
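For example, the release shorthand and an explicit target can be combined (the triple is one of the values printed by `rustc --print target-list`):

    cargo build --release
    cargo build -r --target aarch64-unknown-linux-gnu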
- -### Output Options - -
-
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - - -
--out-dir directory
-
Copy final artifacts to this directory.

-

This option is unstable and available only on the -nightly channel -and requires the -Z unstable-options flag to enable. -See https://github.com/rust-lang/cargo/issues/6790 for more information.

- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - - -
--message-format fmt
-
The output format for diagnostic messages. Can be specified multiple times -and consists of comma-separated values. Valid values:

-
    -
  • human (default): Display in a human-readable text format. Conflicts with -short and json.
  • -
  • short: Emit shorter, human-readable text messages. Conflicts with human -and json.
  • -
  • json: Emit JSON messages to stdout. See -the reference -for more details. Conflicts with human and short.
  • -
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc. Cannot be used with human or short.
  • -
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with human or short.
  • -
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with human or short.
  • -
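A brief sketch of how these values are typically used:

    # Shorter human-readable diagnostics
    cargo build --message-format short

    # Machine-readable output for external tools: JSON messages go to stdout
    # while Cargo renders rustc's diagnostics itself
    cargo build --message-format json-render-diagnostics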
- - - -
--build-plan
-
Outputs a series of JSON messages to stdout that indicate the commands to run -the build.

-

This option is unstable and available only on the -nightly channel -and requires the -Z unstable-options flag to enable. -See https://github.com/rust-lang/cargo/issues/5579 for more information.

- -
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.
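For instance, a CI job might assert that Cargo.lock is already up-to-date:

    # Fail instead of updating Cargo.lock if it is missing or stale
    cargo build --locked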

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.
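A common pattern is to download everything first and then build without network access:

    # While online: download all dependencies listed in Cargo.lock
    cargo fetch

    # Later, without network access
    cargo build --offline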

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
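For example, with rustup installed:

    # Run this build with the nightly toolchain instead of the default
    cargo +nightly build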
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -### Miscellaneous Options - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
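For example, to limit the build to two parallel jobs:

    # Equivalent short form: cargo build -j 2
    cargo build --jobs 2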
- - -
--future-incompat-report
-
Displays a future-incompat report for any future-incompatible warnings -produced during execution of this command.

-

See cargo-report(1)

- - -
- -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Build the local package and all of its dependencies: - - cargo build - -2. Build with optimizations: - - cargo build --release - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-rustc(1)](cargo-rustc.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-check.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-check.md deleted file mode 100644 index 655775a6d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-check.md +++ /dev/null @@ -1,382 +0,0 @@ -# cargo-check(1) - - -## NAME - -cargo-check - Check the current package - -## SYNOPSIS - -`cargo check` [_options_] - -## DESCRIPTION - -Check a local package and all of its dependencies for errors. This will -essentially compile the packages without performing the final step of code -generation, which is faster than running `cargo build`. The compiler will save -metadata files to disk so that future runs will reuse them if the source has -not been modified. Some diagnostics and errors are only emitted during code -generation, so they inherently won't be reported with `cargo check`. - -## OPTIONS - -### Package Selection - -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -`--manifest-path` is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. - -The default members of a workspace can be set explicitly with the -`workspace.default-members` key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -`--workspace`), and a non-virtual workspace will include only the root crate itself. - -
- -
-p spec...
-
--package spec...
-
Check only the specified packages. See cargo-pkgid(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like *, ? and []. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern.
- - -
--workspace
-
Check all members in the workspace.
- - - -
--all
-
Deprecated alias for --workspace.
- - - -
--exclude SPEC...
-
Exclude the specified packages. Must be used in conjunction with the ---workspace flag. This flag may be specified multiple times and supports -common Unix glob patterns like *, ? and []. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern.
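For illustration (the glob pattern below is hypothetical), note the quoting that keeps the shell from expanding the pattern before Cargo sees it:

    # Check every workspace member except those whose names start with "bench-"
    cargo check --workspace --exclude 'bench-*'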
- - -
- - -### Target Selection - -When no target selection options are given, `cargo check` will check all -binary and library targets of the selected packages. Binaries are skipped if -they have `required-features` that are missing. - -Passing target selection flags will check only the specified -targets. - -Note that `--bin`, `--example`, `--test` and `--bench` flags also -support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. - -
- -
--lib
-
Check the package's library.
- - -
--bin name...
-
Check the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--bins
-
Check all binary targets.
- - - -
--example name...
-
Check the specified example. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--examples
-
Check all example targets.
- - -
--test name...
-
Check the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns.
- - -
--tests
-
Check all targets in test mode that have the test = true manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the test flag in the -manifest settings for the target.
- - -
--bench name...
-
Check the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns.
- - -
--benches
-
Check all targets in benchmark mode that have the bench = true -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the bench flag in the -manifest settings for the target.
- - -
--all-targets
-
Check all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
- - -
- - -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Compilation Options - -
- -
--target triple
-
Check for the given architecture. The default is the host -architecture. The general format of the triple is -<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a -list of supported targets.

-

This may also be specified with the build.target -config value.

-

Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -build cache documentation for more details.

- - - -
-r
-
--release
-
Check optimized artifacts with the release profile. -See also the --profile option for choosing a specific profile by name.
- - - -
--profile name
-
Check with the given profile.

-

As a special case, specifying the test profile will also enable checking in -test mode which will enable checking tests and enable the test cfg option. -See rustc tests for more -detail.

-

See the reference for more details on profiles.

- - - -
--ignore-rust-version
-
Check the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's rust-version field.
- - - -
- -### Output Options - -
-
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - - -
--message-format fmt
-
The output format for diagnostic messages. Can be specified multiple times -and consists of comma-separated values. Valid values:

-
    -
  • human (default): Display in a human-readable text format. Conflicts with -short and json.
  • -
  • short: Emit shorter, human-readable text messages. Conflicts with human -and json.
  • -
  • json: Emit JSON messages to stdout. See -the reference -for more details. Conflicts with human and short.
  • -
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc. Cannot be used with human or short.
  • -
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with human or short.
  • -
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with human or short.
  • -
- - -
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -### Miscellaneous Options - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
--future-incompat-report
-
Displays a future-incompat report for any future-incompatible warnings -produced during execution of this command.

-

See cargo-report(1)

- - -
- -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Check the local package for errors: - - cargo check - -2. Check all targets, including unit tests: - - cargo check --all-targets --profile=test - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-clean.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-clean.md deleted file mode 100644 index 4f38f03b1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-clean.md +++ /dev/null @@ -1,188 +0,0 @@ -# cargo-clean(1) - - -## NAME - -cargo-clean - Remove generated artifacts - -## SYNOPSIS - -`cargo clean` [_options_] - -## DESCRIPTION - -Remove artifacts from the target directory that Cargo has generated in the -past. - -With no options, `cargo clean` will delete the entire target directory. - -## OPTIONS - -### Package Selection - -When no packages are selected, all packages and all dependencies in the -workspace are cleaned. - -
-
-p spec...
-
--package spec...
-
Clean only the specified packages. This flag may be specified -multiple times. See cargo-pkgid(1) for the SPEC format.
- -
- -### Clean Options - -
- -
--doc
-
This option will cause cargo clean to remove only the doc directory in -the target directory.
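For example:

    # Remove only target/doc, leaving compiled artifacts in place
    cargo clean --doc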
- - -
--release
-
Remove all artifacts in the release directory.
- - -
--profile name
-
Remove all artifacts in the directory with the given profile name.
- - -
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - - -
--target triple
-
Clean for the given architecture. The default is the host -architecture. The general format of the triple is -<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a -list of supported targets.

-

This may also be specified with the build.target -config value.

-

Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -build cache documentation for more details.

- - - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Remove the entire target directory: - - cargo clean - -2. Remove only the release artifacts: - - cargo clean --release - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-doc.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-doc.md deleted file mode 100644 index 13eb90b6e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-doc.md +++ /dev/null @@ -1,347 +0,0 @@ -# cargo-doc(1) - - -## NAME - -cargo-doc - Build a package's documentation - -## SYNOPSIS - -`cargo doc` [_options_] - -## DESCRIPTION - -Build the documentation for the local package and all dependencies. The output -is placed in `target/doc` in rustdoc's usual format. - -## OPTIONS - -### Documentation Options - -
- -
--open
-
Open the docs in a browser after building them. This will use your default -browser unless you define another one in the BROWSER environment variable -or use the doc.browser configuration -option.
- - -
--no-deps
-
Do not build documentation for dependencies.
- - -
--document-private-items
-
Include non-public items in the documentation. This will be enabled by default if documenting a binary target.
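For example, to document only the current package, include private items, and view the result:

    # Skip dependency docs, document private items, then open in a browser
    cargo doc --no-deps --document-private-items --open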
- - -
- -### Package Selection - -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -`--manifest-path` is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. - -The default members of a workspace can be set explicitly with the -`workspace.default-members` key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -`--workspace`), and a non-virtual workspace will include only the root crate itself. - -
- -
-p spec...
-
--package spec...
-
Document only the specified packages. See cargo-pkgid(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like *, ? and []. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern.
- - -
--workspace
-
Document all members in the workspace.
- - - -
--all
-
Deprecated alias for --workspace.
- - - -
--exclude SPEC...
-
Exclude the specified packages. Must be used in conjunction with the ---workspace flag. This flag may be specified multiple times and supports -common Unix glob patterns like *, ? and []. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern.
- - -
- - -### Target Selection - -When no target selection options are given, `cargo doc` will document all -binary and library targets of the selected package. The binary will be skipped -if its name is the same as the lib target. Binaries are skipped if they have -`required-features` that are missing. - -The default behavior can be changed by setting `doc = false` for the target in -the manifest settings. Using target selection options will ignore the `doc` -flag and will always document the given target. - -
-
--lib
-
Document the package's library.
- - -
--bin name...
-
Document the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--bins
-
Document all binary targets.
- - - -
--example name...
-
Document the specified example. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--examples
-
Document all example targets.
- - -
- -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Compilation Options - -
- -
--target triple
-
Document for the given architecture. The default is the host -architecture. The general format of the triple is -<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a -list of supported targets.

-

This may also be specified with the build.target -config value.

-

Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -build cache documentation for more details.

- - - -
-r
-
--release
-
Document optimized artifacts with the release profile. -See also the --profile option for choosing a specific profile by name.
- - - -
--profile name
-
Document with the given profile. -See the reference for more details on profiles.
- - - -
--ignore-rust-version
-
Document the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's rust-version field.
- - - -
- -### Output Options - -
-
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - - -
--message-format fmt
-
The output format for diagnostic messages. Can be specified multiple times -and consists of comma-separated values. Valid values:

-
    -
  • human (default): Display in a human-readable text format. Conflicts with -short and json.
  • -
  • short: Emit shorter, human-readable text messages. Conflicts with human -and json.
  • -
  • json: Emit JSON messages to stdout. See -the reference -for more details. Conflicts with human and short.
  • -
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc. Cannot be used with human or short.
  • -
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with human or short.
  • -
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with human or short.
  • -
- - -
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -### Miscellaneous Options - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
- -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Build the local package documentation and its dependencies and output to - `target/doc`. - - cargo doc - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-rustdoc(1)](cargo-rustdoc.html), [rustdoc(1)](https://doc.rust-lang.org/rustdoc/index.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-fetch.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-fetch.md deleted file mode 100644 index 675d99e60..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-fetch.md +++ /dev/null @@ -1,157 +0,0 @@ -# cargo-fetch(1) - - -## NAME - -cargo-fetch - Fetch dependencies of a package from the network - -## SYNOPSIS - -`cargo fetch` [_options_] - -## DESCRIPTION - -If a `Cargo.lock` file is available, this command will ensure that all of the -git dependencies and/or registry dependencies are downloaded and locally -available. Subsequent Cargo commands never touch the network after a `cargo -fetch` unless the lock file changes. - -If the lock file is not available, then this command will generate the lock -file before fetching the dependencies. - -If `--target` is not specified, then all target dependencies are fetched. - -See also the [cargo-prefetch](https://crates.io/crates/cargo-prefetch) -plugin which adds a command to download popular crates. This may be useful if -you plan to use Cargo without a network with the `--offline` flag. - -## OPTIONS - -### Fetch options - -
-
--target triple
-
Fetch for the given architecture. The default is the host -architecture. The general format of the triple is -<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a -list of supported targets.

-

This may also be specified with the build.target -config value.

-

Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -build cache documentation for more details.

- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Fetch all dependencies: - - cargo fetch - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-update(1)](cargo-update.html), [cargo-generate-lockfile(1)](cargo-generate-lockfile.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-fix.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-fix.md deleted file mode 100644 index d98660516..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-fix.md +++ /dev/null @@ -1,460 +0,0 @@ -# cargo-fix(1) - - -## NAME - -cargo-fix - Automatically fix lint warnings reported by rustc - -## SYNOPSIS - -`cargo fix` [_options_] - -## DESCRIPTION - -This Cargo subcommand will automatically take rustc's suggestions from -diagnostics like warnings and apply them to your source code. This is intended -to help automate tasks that rustc itself already knows how to tell you to fix! - -Executing `cargo fix` will under the hood execute [cargo-check(1)](cargo-check.html). Any warnings -applicable to your crate will be automatically fixed (if possible) and all -remaining warnings will be displayed when the check process is finished. For -example if you'd like to apply all fixes to the current package, you can run: - - cargo fix - -which behaves the same as `cargo check --all-targets`. - -`cargo fix` is only capable of fixing code that is normally compiled with -`cargo check`. If code is conditionally enabled with optional features, you -will need to enable those features for that code to be analyzed: - - cargo fix --features foo - -Similarly, other `cfg` expressions like platform-specific code will need to -pass `--target` to fix code for the given target. - - cargo fix --target x86_64-pc-windows-gnu - -If you encounter any problems with `cargo fix` or otherwise have any questions -or feature requests please don't hesitate to file an issue at -. - -### Edition migration - -The `cargo fix` subcommand can also be used to migrate a package from one -[edition] to the next. The general procedure is: - -1. Run `cargo fix --edition`. Consider also using the `--all-features` flag if - your project has multiple features. You may also want to run `cargo fix - --edition` multiple times with different `--target` flags if your project - has platform-specific code gated by `cfg` attributes. -2. Modify `Cargo.toml` to set the [edition field] to the new edition. -3. Run your project tests to verify that everything still works. If new - warnings are issued, you may want to consider running `cargo fix` again - (without the `--edition` flag) to apply any suggestions given by the - compiler. - -And hopefully that's it! Just keep in mind of the caveats mentioned above that -`cargo fix` cannot update code for inactive features or `cfg` expressions. -Also, in some rare cases the compiler is unable to automatically migrate all -code to the new edition, and this may require manual changes after building -with the new edition. - -[edition]: https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html -[edition field]: ../reference/manifest.html#the-edition-field - -## OPTIONS - -### Fix options - -
- -
--broken-code
-
Fix code even if it already has compiler errors. This is useful if cargo fix -fails to apply the changes. It will apply the changes and leave the broken -code in the working directory for you to inspect and manually fix.
- - -
--edition
-
Apply changes that will update the code to the next edition. This will not -update the edition in the Cargo.toml manifest, which must be updated -manually after cargo fix --edition has finished.
- - -
--edition-idioms
-
Apply suggestions that will update code to the preferred style for the current -edition.
- - -
--allow-no-vcs
-
Fix code even if a VCS was not detected.
- - -
--allow-dirty
-
Fix code even if the working directory has changes.
- - -
--allow-staged
-
Fix code even if the working directory has staged changes.
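For instance, assuming you are comfortable applying fixes on top of uncommitted work, an edition migration on a dirty working tree might be run as:

    # Apply edition migration fixes even though the working directory has
    # uncommitted and/or staged changes
    cargo fix --edition --allow-dirty --allow-staged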
- - -
- -### Package Selection - -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -`--manifest-path` is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. - -The default members of a workspace can be set explicitly with the -`workspace.default-members` key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -`--workspace`), and a non-virtual workspace will include only the root crate itself. - -
- -
-p spec...
-
--package spec...
-
Fix only the specified packages. See cargo-pkgid(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like *, ? and []. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern.
- - -
--workspace
-
Fix all members in the workspace.
- - - -
--all
-
Deprecated alias for --workspace.
- - - -
--exclude SPEC...
-
Exclude the specified packages. Must be used in conjunction with the ---workspace flag. This flag may be specified multiple times and supports -common Unix glob patterns like *, ? and []. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern.
- - -
- - -### Target Selection - -When no target selection options are given, `cargo fix` will fix all targets -(`--all-targets` implied). Binaries are skipped if they have -`required-features` that are missing. - -Passing target selection flags will fix only the specified -targets. - -Note that `--bin`, `--example`, `--test` and `--bench` flags also -support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. - -
- -
--lib
-
Fix the package's library.
- - -
--bin name...
-
Fix the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--bins
-
Fix all binary targets.
- - - -
--example name...
-
Fix the specified example. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--examples
-
Fix all example targets.
- - -
--test name...
-
Fix the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns.
- - -
--tests
-
Fix all targets in test mode that have the test = true manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the test flag in the -manifest settings for the target.
- - -
--bench name...
-
Fix the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns.
- - -
--benches
-
Fix all targets in benchmark mode that have the bench = true -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the bench flag in the -manifest settings for the target.
- - -
--all-targets
-
Fix all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
- - -
- - -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Compilation Options - -
- -
--target triple
-
Fix for the given architecture. The default is the host -architecture. The general format of the triple is -<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a -list of supported targets.

-

This may also be specified with the build.target -config value.

-

Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -build cache documentation for more details.

- - - -
-r
-
--release
-
Fix optimized artifacts with the release profile. -See also the --profile option for choosing a specific profile by name.
- - - -
--profile name
-
Fix with the given profile.

-

As a special case, specifying the test profile will also enable checking in -test mode which will enable checking tests and enable the test cfg option. -See rustc tests for more -detail.

-

See the reference for more details on profiles.

- - - -
--ignore-rust-version
-
Fix the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's rust-version field.
- - - -
- -### Output Options - -
-
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - - -
--message-format fmt
-
The output format for diagnostic messages. Can be specified multiple times -and consists of comma-separated values. Valid values:

-
    -
  • human (default): Display in a human-readable text format. Conflicts with -short and json.
  • -
  • short: Emit shorter, human-readable text messages. Conflicts with human -and json.
  • -
  • json: Emit JSON messages to stdout. See -the reference -for more details. Conflicts with human and short.
  • -
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc. Cannot be used with human or short.
  • -
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with human or short.
  • -
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with human or short.
  • -
- - -
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -### Miscellaneous Options - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
- -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Apply compiler suggestions to the local package: - - cargo fix - -2. Update a package to prepare it for the next edition: - - cargo fix --edition - -3. Apply suggested idioms for the current edition: - - cargo fix --edition-idioms - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-check(1)](cargo-check.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-generate-lockfile.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-generate-lockfile.md deleted file mode 100644 index 20189031d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-generate-lockfile.md +++ /dev/null @@ -1,132 +0,0 @@ -# cargo-generate-lockfile(1) - -## NAME - -cargo-generate-lockfile - Generate the lockfile for a package - -## SYNOPSIS - -`cargo generate-lockfile` [_options_] - -## DESCRIPTION - -This command will create the `Cargo.lock` lockfile for the current package or -workspace. If the lockfile already exists, it will be rebuilt with the latest -available version of every package. - -See also [cargo-update(1)](cargo-update.html) which is also capable of creating a `Cargo.lock` -lockfile and has more options for controlling update behavior. - -## OPTIONS - -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Create or update the lockfile for the current package or workspace: - - cargo generate-lockfile - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-update(1)](cargo-update.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-help.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-help.md deleted file mode 100644 index a3bffb167..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-help.md +++ /dev/null @@ -1,26 +0,0 @@ -# cargo-help(1) - -## NAME - -cargo-help - Get help for a Cargo command - -## SYNOPSIS - -`cargo help` [_subcommand_] - -## DESCRIPTION - -Prints a help message for the given command. - -## EXAMPLES - -1. Get help for a command: - - cargo help build - -2. Help is also available with the `--help` flag: - - cargo build --help - -## SEE ALSO -[cargo(1)](cargo.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-init.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-init.md deleted file mode 100644 index 3cd8e46a3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-init.md +++ /dev/null @@ -1,145 +0,0 @@ -# cargo-init(1) - -## NAME - -cargo-init - Create a new Cargo package in an existing directory - -## SYNOPSIS - -`cargo init` [_options_] [_path_] - -## DESCRIPTION - -This command will create a new Cargo manifest in the current directory. Give a -path as an argument to create in the given directory. - -If there are typically-named Rust source files already in the directory, those -will be used. If not, then a sample `src/main.rs` file will be created, or -`src/lib.rs` if `--lib` is passed. - -If the directory is not already in a VCS repository, then a new repository -is created (see `--vcs` below). - -See [cargo-new(1)](cargo-new.html) for a similar command which will create a new package in -a new directory. - -## OPTIONS - -### Init Options - -
- -
--bin
-
Create a package with a binary target (src/main.rs). -This is the default behavior.
- - -
--lib
-
Create a package with a library target (src/lib.rs).
- - -
--edition edition
-
Specify the Rust edition to use. Default is 2021. -Possible values: 2015, 2018, 2021
- - -
--name name
-
Set the package name. Defaults to the directory name.
- - -
--vcs vcs
-
Initialize a new VCS repository for the given version control system (git, -hg, pijul, or fossil) or do not initialize any version control at all -(none). If not specified, defaults to git or the configuration value -cargo-new.vcs, or none if already inside a VCS repository.
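For example (the directory name is a placeholder):

    # Initialize a library package in the existing ./mylib directory,
    # skipping version-control setup
    cargo init --lib --vcs none ./mylib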
- - -
--registry registry
-
This sets the publish field in Cargo.toml to the given registry name -which will restrict publishing only to that registry.

-

Registry names are defined in Cargo config files. -If not specified, the default registry defined by the registry.default -config key is used. If the default registry is not set and --registry is not -used, the publish field will not be set which means that publishing will not -be restricted.

- - -
- - -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Create a binary Cargo package in the current directory: - - cargo init - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-new(1)](cargo-new.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-install.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-install.md deleted file mode 100644 index 3576d51df..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-install.md +++ /dev/null @@ -1,390 +0,0 @@ -# cargo-install(1) - - - -## NAME - -cargo-install - Build and install a Rust binary - -## SYNOPSIS - -`cargo install` [_options_] _crate_...\ -`cargo install` [_options_] `--path` _path_\ -`cargo install` [_options_] `--git` _url_ [_crate_...]\ -`cargo install` [_options_] `--list` - -## DESCRIPTION - -This command manages Cargo's local set of installed binary crates. Only -packages which have executable `[[bin]]` or `[[example]]` targets can be -installed, and all executables are installed into the installation root's -`bin` folder. - -The installation root is determined, in order of precedence: - -- `--root` option -- `CARGO_INSTALL_ROOT` environment variable -- `install.root` Cargo [config value](../reference/config.html) -- `CARGO_HOME` environment variable -- `$HOME/.cargo` - - -There are multiple sources from which a crate can be installed. The default -location is crates.io but the `--git`, `--path`, and `--registry` flags can -change this source. If the source contains more than one package (such as -crates.io or a git repository with multiple crates) the _crate_ argument is -required to indicate which crate should be installed. - -Crates from crates.io can optionally specify the version they wish to install -via the `--version` flags, and similarly packages from git repositories can -optionally specify the branch, tag, or revision that should be installed. If a -crate has multiple binaries, the `--bin` argument can selectively install only -one of them, and if you'd rather install examples the `--example` argument can -be used as well. - -If the package is already installed, Cargo will reinstall it if the installed -version does not appear to be up-to-date. If any of the following values -change, then Cargo will reinstall the package: - -- The package version and source. -- The set of binary names installed. -- The chosen features. -- The profile (`--profile`). -- The target (`--target`). - -Installing with `--path` will always build and install, unless there are -conflicting binaries from another package. The `--force` flag may be used to -force Cargo to always reinstall the package. - -If the source is crates.io or `--git` then by default the crate will be built -in a temporary target directory. To avoid this, the target directory can be -specified by setting the `CARGO_TARGET_DIR` environment variable to a relative -path. In particular, this can be useful for caching build artifacts on -continuous integration systems. - -By default, the `Cargo.lock` file that is included with the package will be -ignored. This means that Cargo will recompute which versions of dependencies -to use, possibly using newer versions that have been released since the -package was published. 
The `--locked` flag can be used to force Cargo to use -the packaged `Cargo.lock` file if it is available. This may be useful for -ensuring reproducible builds, to use the exact same set of dependencies that -were available when the package was published. It may also be useful if a -newer version of a dependency is published that no longer builds on your -system, or has other problems. The downside to using `--locked` is that you -will not receive any fixes or updates to any dependency. Note that Cargo did -not start publishing `Cargo.lock` files until version 1.37, which means -packages published with prior versions will not have a `Cargo.lock` file -available. - -## OPTIONS - -### Install Options - -
- -
--vers version
-
--version version
-
Specify a version to install. This may be a version -requirement, like ~1.2, to have Cargo -select the newest version from the given requirement. If the version does not -have a requirement operator (such as ^ or ~), then it must be in the form -MAJOR.MINOR.PATCH, and will install exactly that version; it is not -treated as a caret requirement like Cargo dependencies are.
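For illustration, reusing the ~1.2 requirement mentioned above (the crate name comes from the EXAMPLES section of this page; the exact version is a placeholder), the two forms would be invoked as:

    cargo install ripgrep --version 1.2.0     # exact version, installed as-is
    cargo install ripgrep --version '~1.2'    # requirement, newest matching release

Quoting the requirement keeps the shell from interpreting characters such as ~.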
- - -
--git url
-
Git URL to install the specified crate from.
- - -
--branch branch
-
Branch to use when installing from git.
- - -
--tag tag
-
Tag to use when installing from git.
- - -
--rev sha
-
Specific commit to use when installing from git.
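As a sketch of combining these git options (the branch name here is an assumption; any branch, tag, or revision that exists in the repository would work the same way):

    cargo install --git https://github.com/rust-lang/cargo cargo
    cargo install --git https://github.com/rust-lang/cargo --branch master cargo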
- - -
--path path
-
Filesystem path to local crate to install.
- - -
--list
-
List all installed packages and their versions.
- - -
-f
-
--force
-
Force overwriting existing crates or binaries. This can be used if a package -has installed a binary with the same name as another package. This is also -useful if something has changed on the system that you want to rebuild with, -such as a newer version of rustc.
- - -
--no-track
-
By default, Cargo keeps track of the installed packages with a metadata file stored in the installation root directory. This flag tells Cargo not to use or create that file. With this flag, Cargo will refuse to overwrite any existing files unless the --force flag is used. This also disables Cargo's ability to protect against multiple Cargo invocations installing concurrently.

- - -
--bin name...
-
Install only the specified binary.
- - -
--bins
-
Install all binaries.
- - -
--example name...
-
Install only the specified example.
- - -
--examples
-
Install all examples.
- - -
--root dir
-
Directory to install packages into.
- - -
--registry registry
-
Name of the registry to use. Registry names are defined in Cargo config -files. If not specified, the default registry is used, -which is defined by the registry.default config key which defaults to -crates-io.
- - - -
--index index
-
The URL of the registry index to use.
- - - -
- -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Compilation Options - -
- -
--target triple
-
Install for the given architecture. The default is the host -architecture. The general format of the triple is -<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a -list of supported targets.
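For example (the musl triple below is just one commonly used target; substitute any triple reported by rustc --print target-list):

    cargo install ripgrep --target x86_64-unknown-linux-musl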

-

This may also be specified with the build.target -config value.

-

Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -build cache documentation for more details.

- - - -
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to a new temporary folder located in the -temporary directory of the platform.

-

When using --path, by default it will use the target directory in the workspace of the local crate unless --target-dir is specified.

- - - -
--debug
-
Build with the dev profile instead of the release profile. See also the --profile option for choosing a specific profile by name.
- - -
--profile name
-
Install with the given profile. See the reference for more details on profiles.
- - - -
- -### Manifest Options - -
-
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - -
- -### Miscellaneous Options - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - - -
--message-format fmt
-
The output format for diagnostic messages. Can be specified multiple times -and consists of comma-separated values. Valid values:

-
    -
  • human (default): Display in a human-readable text format. Conflicts with -short and json.
  • -
  • short: Emit shorter, human-readable text messages. Conflicts with human -and json.
  • -
  • json: Emit JSON messages to stdout. See -the reference -for more details. Conflicts with human and short.
  • -
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains -the "short" rendering from rustc. Cannot be used with human or short.
  • -
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with human or short.
  • -
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
  • -
- - - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Install or upgrade a package from crates.io: - - cargo install ripgrep - -2. Install or reinstall the package in the current directory: - - cargo install --path . - -3. View the list of installed packages: - - cargo install --list - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-uninstall(1)](cargo-uninstall.html), [cargo-search(1)](cargo-search.html), [cargo-publish(1)](cargo-publish.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-locate-project.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-locate-project.md deleted file mode 100644 index 600d9a7b0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-locate-project.md +++ /dev/null @@ -1,121 +0,0 @@ -# cargo-locate-project(1) - -## NAME - -cargo-locate-project - Print a JSON representation of a Cargo.toml file's location - -## SYNOPSIS - -`cargo locate-project` [_options_] - -## DESCRIPTION - -This command will print a JSON object to stdout with the full path to the -`Cargo.toml` manifest. - -## OPTIONS - -
- -
--workspace
-
Locate the Cargo.toml at the root of the workspace, as opposed to the current -workspace member.
- - -
- -### Display Options - -
- -
--message-format fmt
-
The representation in which to print the project location. Valid values:

-
    -
  • json (default): JSON object with the path under the key "root".
  • -
  • plain: Just the path.
  • -
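A quick sketch combining the options above, which would print only the bare path to the workspace root manifest:

    cargo locate-project --workspace --message-format plain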
- - -
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Display the path to the manifest based on the current directory: - - cargo locate-project - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-metadata(1)](cargo-metadata.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-login.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-login.md deleted file mode 100644 index 654682498..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-login.md +++ /dev/null @@ -1,113 +0,0 @@ -# cargo-login(1) - -## NAME - -cargo-login - Save an API token from the registry locally - -## SYNOPSIS - -`cargo login` [_options_] [_token_] - -## DESCRIPTION - -This command will save the API token to disk so that commands that require -authentication, such as [cargo-publish(1)](cargo-publish.html), will be automatically -authenticated. The token is saved in `$CARGO_HOME/credentials.toml`. `CARGO_HOME` -defaults to `.cargo` in your home directory. - -If the _token_ argument is not specified, it will be read from stdin. - -The API token for crates.io may be retrieved from . - -Take care to keep the token secret, it should not be shared with anyone else. - -## OPTIONS - -### Login Options - -
-
--registry registry
-
Name of the registry to use. Registry names are defined in Cargo config -files. If not specified, the default registry is used, -which is defined by the registry.default config key which defaults to -crates-io.
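For example (the registry name is a placeholder and must be defined in your Cargo configuration); the token is read from stdin since it is not passed on the command line:

    cargo login --registry my-registry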
- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Save the API token to disk: - - cargo login - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-publish(1)](cargo-publish.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-metadata.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-metadata.md deleted file mode 100644 index 870b6ef35..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-metadata.md +++ /dev/null @@ -1,460 +0,0 @@ -# cargo-metadata(1) - -## NAME - -cargo-metadata - Machine-readable metadata about the current package - -## SYNOPSIS - -`cargo metadata` [_options_] - -## DESCRIPTION - -Output JSON to stdout containing information about the workspace members and -resolved dependencies of the current package. - -It is recommended to include the `--format-version` flag to future-proof -your code to ensure the output is in the format you are expecting. - -See the [cargo_metadata crate](https://crates.io/crates/cargo_metadata) -for a Rust API for reading the metadata. - -## OUTPUT FORMAT - -The output has the following format: - -```javascript -{ - /* Array of all packages in the workspace. - It also includes all feature-enabled dependencies unless --no-deps is used. - */ - "packages": [ - { - /* The name of the package. */ - "name": "my-package", - /* The version of the package. */ - "version": "0.1.0", - /* The Package ID, a unique identifier for referring to the package. */ - "id": "my-package 0.1.0 (path+file:///path/to/my-package)", - /* The license value from the manifest, or null. */ - "license": "MIT/Apache-2.0", - /* The license-file value from the manifest, or null. */ - "license_file": "LICENSE", - /* The description value from the manifest, or null. */ - "description": "Package description.", - /* The source ID of the package. This represents where - a package is retrieved from. - This is null for path dependencies and workspace members. - For other dependencies, it is a string with the format: - - "registry+URL" for registry-based dependencies. - Example: "registry+https://github.com/rust-lang/crates.io-index" - - "git+URL" for git-based dependencies. - Example: "git+https://github.com/rust-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c" - */ - "source": null, - /* Array of dependencies declared in the package's manifest. */ - "dependencies": [ - { - /* The name of the dependency. */ - "name": "bitflags", - /* The source ID of the dependency. May be null, see - description for the package source. - */ - "source": "registry+https://github.com/rust-lang/crates.io-index", - /* The version requirement for the dependency. - Dependencies without a version requirement have a value of "*". - */ - "req": "^1.0", - /* The dependency kind. - "dev", "build", or null for a normal dependency. - */ - "kind": null, - /* If the dependency is renamed, this is the new name for - the dependency as a string. null if it is not renamed. - */ - "rename": null, - /* Boolean of whether or not this is an optional dependency. */ - "optional": false, - /* Boolean of whether or not default features are enabled. */ - "uses_default_features": true, - /* Array of features enabled. */ - "features": [], - /* The target platform for the dependency. 
- null if not a target dependency. - */ - "target": "cfg(windows)", - /* The file system path for a local path dependency. - not present if not a path dependency. - */ - "path": "/path/to/dep", - /* A string of the URL of the registry this dependency is from. - If not specified or null, the dependency is from the default - registry (crates.io). - */ - "registry": null - } - ], - /* Array of Cargo targets. */ - "targets": [ - { - /* Array of target kinds. - - lib targets list the `crate-type` values from the - manifest such as "lib", "rlib", "dylib", - "proc-macro", etc. (default ["lib"]) - - binary is ["bin"] - - example is ["example"] - - integration test is ["test"] - - benchmark is ["bench"] - - build script is ["custom-build"] - */ - "kind": [ - "bin" - ], - /* Array of crate types. - - lib and example libraries list the `crate-type` values - from the manifest such as "lib", "rlib", "dylib", - "proc-macro", etc. (default ["lib"]) - - all other target kinds are ["bin"] - */ - "crate_types": [ - "bin" - ], - /* The name of the target. */ - "name": "my-package", - /* Absolute path to the root source file of the target. */ - "src_path": "/path/to/my-package/src/main.rs", - /* The Rust edition of the target. - Defaults to the package edition. - */ - "edition": "2018", - /* Array of required features. - This property is not included if no required features are set. - */ - "required-features": ["feat1"], - /* Whether the target should be documented by `cargo doc`. */ - "doc": true, - /* Whether or not this target has doc tests enabled, and - the target is compatible with doc testing. - */ - "doctest": false, - /* Whether or not this target should be built and run with `--test` - */ - "test": true - } - ], - /* Set of features defined for the package. - Each feature maps to an array of features or dependencies it - enables. - */ - "features": { - "default": [ - "feat1" - ], - "feat1": [], - "feat2": [] - }, - /* Absolute path to this package's manifest. */ - "manifest_path": "/path/to/my-package/Cargo.toml", - /* Package metadata. - This is null if no metadata is specified. - */ - "metadata": { - "docs": { - "rs": { - "all-features": true - } - } - }, - /* List of registries to which this package may be published. - Publishing is unrestricted if null, and forbidden if an empty array. */ - "publish": [ - "crates-io" - ], - /* Array of authors from the manifest. - Empty array if no authors specified. - */ - "authors": [ - "Jane Doe " - ], - /* Array of categories from the manifest. */ - "categories": [ - "command-line-utilities" - ], - /* Optional string that is the default binary picked by cargo run. */ - "default_run": null, - /* Optional string that is the minimum supported rust version */ - "rust_version": "1.56", - /* Array of keywords from the manifest. */ - "keywords": [ - "cli" - ], - /* The readme value from the manifest or null if not specified. */ - "readme": "README.md", - /* The repository value from the manifest or null if not specified. */ - "repository": "https://github.com/rust-lang/cargo", - /* The homepage value from the manifest or null if not specified. */ - "homepage": "https://rust-lang.org", - /* The documentation value from the manifest or null if not specified. */ - "documentation": "https://doc.rust-lang.org/stable/std", - /* The default edition of the package. - Note that individual targets may have different editions. - */ - "edition": "2018", - /* Optional string that is the name of a native library the package - is linking to. 
- */ - "links": null, - } - ], - /* Array of members of the workspace. - Each entry is the Package ID for the package. - */ - "workspace_members": [ - "my-package 0.1.0 (path+file:///path/to/my-package)", - ], - // The resolved dependency graph for the entire workspace. The enabled - // features are based on the enabled features for the "current" package. - // Inactivated optional dependencies are not listed. - // - // This is null if --no-deps is specified. - // - // By default, this includes all dependencies for all target platforms. - // The `--filter-platform` flag may be used to narrow to a specific - // target triple. - "resolve": { - /* Array of nodes within the dependency graph. - Each node is a package. - */ - "nodes": [ - { - /* The Package ID of this node. */ - "id": "my-package 0.1.0 (path+file:///path/to/my-package)", - /* The dependencies of this package, an array of Package IDs. */ - "dependencies": [ - "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" - ], - /* The dependencies of this package. This is an alternative to - "dependencies" which contains additional information. In - particular, this handles renamed dependencies. - */ - "deps": [ - { - /* The name of the dependency's library target. - If this is a renamed dependency, this is the new - name. - */ - "name": "bitflags", - /* The Package ID of the dependency. */ - "pkg": "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - /* Array of dependency kinds. Added in Cargo 1.40. */ - "dep_kinds": [ - { - /* The dependency kind. - "dev", "build", or null for a normal dependency. - */ - "kind": null, - /* The target platform for the dependency. - null if not a target dependency. - */ - "target": "cfg(windows)" - } - ] - } - ], - /* Array of features enabled on this package. */ - "features": [ - "default" - ] - } - ], - /* The root package of the workspace. - This is null if this is a virtual workspace. Otherwise it is - the Package ID of the root package. - */ - "root": "my-package 0.1.0 (path+file:///path/to/my-package)" - }, - /* The absolute path to the build directory where Cargo places its output. */ - "target_directory": "/path/to/my-package/target", - /* The version of the schema for this metadata structure. - This will be changed if incompatible changes are ever made. - */ - "version": 1, - /* The absolute path to the root of the workspace. */ - "workspace_root": "/path/to/my-package" - /* Workspace metadata. - This is null if no metadata is specified. */ - "metadata": { - "docs": { - "rs": { - "all-features": true - } - } - } -} -```` - -## OPTIONS - -### Output Options - -
- -
--no-deps
-
Output information only about the workspace members and don't fetch -dependencies.
- - -
--format-version version
-
Specify the version of the output format to use. Currently 1 is the only -possible value.
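A minimal invocation pinning the output format and skipping dependency resolution (both options are described above):

    cargo metadata --format-version 1 --no-deps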
- - -
--filter-platform triple
-
This filters the resolve output to only include dependencies for the -given target triple. Without this flag, the resolve includes all targets.

-

Note that the dependencies listed in the "packages" array still include all dependencies. Each package definition is intended to be an unaltered reproduction of the information within Cargo.toml.

- - -
- -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Output JSON about the current package: - - cargo metadata --format-version=1 - -## SEE ALSO -[cargo(1)](cargo.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-new.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-new.md deleted file mode 100644 index 89f26c65b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-new.md +++ /dev/null @@ -1,140 +0,0 @@ -# cargo-new(1) - -## NAME - -cargo-new - Create a new Cargo package - -## SYNOPSIS - -`cargo new` [_options_] _path_ - -## DESCRIPTION - -This command will create a new Cargo package in the given directory. This -includes a simple template with a `Cargo.toml` manifest, sample source file, -and a VCS ignore file. If the directory is not already in a VCS repository, -then a new repository is created (see `--vcs` below). - -See [cargo-init(1)](cargo-init.html) for a similar command which will create a new manifest -in an existing directory. - -## OPTIONS - -### New Options - -
- -
--bin
-
Create a package with a binary target (src/main.rs). -This is the default behavior.
- - -
--lib
-
Create a package with a library target (src/lib.rs).
- - -
--edition edition
-
Specify the Rust edition to use. Default is 2021. -Possible values: 2015, 2018, 2021
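For instance (the package name is a placeholder), creating a library package that targets an older edition might look like:

    cargo new my-library --lib --edition 2018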
- - -
--name name
-
Set the package name. Defaults to the directory name.
- - -
--vcs vcs
-
Initialize a new VCS repository for the given version control system (git, -hg, pijul, or fossil) or do not initialize any version control at all -(none). If not specified, defaults to git or the configuration value -cargo-new.vcs, or none if already inside a VCS repository.
- - -
--registry registry
-
This sets the publish field in Cargo.toml to the given registry name -which will restrict publishing only to that registry.

-

Registry names are defined in Cargo config files. -If not specified, the default registry defined by the registry.default -config key is used. If the default registry is not set and --registry is not -used, the publish field will not be set which means that publishing will not -be restricted.

- - -
- - -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Create a binary Cargo package in the given directory: - - cargo new foo - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-init(1)](cargo-init.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-owner.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-owner.md deleted file mode 100644 index 9d6998bb2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-owner.md +++ /dev/null @@ -1,159 +0,0 @@ -# cargo-owner(1) - -## NAME - -cargo-owner - Manage the owners of a crate on the registry - -## SYNOPSIS - -`cargo owner` [_options_] `--add` _login_ [_crate_]\ -`cargo owner` [_options_] `--remove` _login_ [_crate_]\ -`cargo owner` [_options_] `--list` [_crate_] - -## DESCRIPTION - -This command will modify the owners for a crate on the registry. Owners of a -crate can upload new versions and yank old versions. Non-team owners can also -modify the set of owners, so take care! - -This command requires you to be authenticated with either the `--token` option -or using [cargo-login(1)](cargo-login.html). - -If the crate name is not specified, it will use the package name from the -current directory. - -See [the reference](../reference/publishing.html#cargo-owner) for more -information about owners and publishing. - -## OPTIONS - -### Owner Options - -
- -
-a
-
--add login...
-
Invite the given user or team as an owner.
- - -
-r
-
--remove login...
-
Remove the given user or team as an owner.
- - -
-l
-
--list
-
List owners of a crate.
- - -
--token token
-
API token to use when authenticating. This overrides the token stored in -the credentials file (which is created by cargo-login(1)).

-

Cargo config environment variables can be -used to override the tokens stored in the credentials file. The token for -crates.io may be specified with the CARGO_REGISTRY_TOKEN environment -variable. Tokens for other registries may be specified with environment -variables of the form CARGO_REGISTRIES_NAME_TOKEN where NAME is the name -of the registry in all capital letters.

- - - -
--index index
-
The URL of the registry index to use.
- - - -
--registry registry
-
Name of the registry to use. Registry names are defined in Cargo config -files. If not specified, the default registry is used, -which is defined by the registry.default config key which defaults to -crates-io.
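A hypothetical invocation against an alternative registry (the registry name is a placeholder; foo matches the package used in the EXAMPLES below):

    cargo owner --list foo --registry my-registry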
- - - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. List owners of a package: - - cargo owner --list foo - -2. Invite an owner to a package: - - cargo owner --add username foo - -3. Remove an owner from a package: - - cargo owner --remove username foo - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-login(1)](cargo-login.html), [cargo-publish(1)](cargo-publish.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-package.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-package.md deleted file mode 100644 index 57f11332f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-package.md +++ /dev/null @@ -1,308 +0,0 @@ -# cargo-package(1) - - - -## NAME - -cargo-package - Assemble the local package into a distributable tarball - -## SYNOPSIS - -`cargo package` [_options_] - -## DESCRIPTION - -This command will create a distributable, compressed `.crate` file with the -source code of the package in the current directory. The resulting file will -be stored in the `target/package` directory. This performs the following -steps: - -1. Load and check the current workspace, performing some basic checks. - - Path dependencies are not allowed unless they have a version key. Cargo - will ignore the path key for dependencies in published packages. - `dev-dependencies` do not have this restriction. -2. Create the compressed `.crate` file. - - The original `Cargo.toml` file is rewritten and normalized. - - `[patch]`, `[replace]`, and `[workspace]` sections are removed from the - manifest. - - `Cargo.lock` is automatically included if the package contains an - executable binary or example target. [cargo-install(1)](cargo-install.html) will use the - packaged lock file if the `--locked` flag is used. - - A `.cargo_vcs_info.json` file is included that contains information - about the current VCS checkout hash if available (not included with - `--allow-dirty`). -3. Extract the `.crate` file and build it to verify it can build. - - This will rebuild your package from scratch to ensure that it can be - built from a pristine state. The `--no-verify` flag can be used to skip - this step. -4. Check that build scripts did not modify any source files. - -The list of files included can be controlled with the `include` and `exclude` -fields in the manifest. - -See [the reference](../reference/publishing.html) for more details about -packaging and publishing. - -### .cargo_vcs_info.json format - -Will generate a `.cargo_vcs_info.json` in the following format - -```javascript -{ - "git": { - "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302" - }, - "path_in_vcs": "" -} -``` - -`path_in_vcs` will be set to a repo-relative path for packages -in subdirectories of the version control repository. - -## OPTIONS - -### Package Options - -
- -
-l
-
--list
-
Print files included in a package without making one.
- - -
--no-verify
-
Don't verify the contents by building them.
- - -
--no-metadata
-
Ignore warnings about a lack of human-usable metadata (such as the description -or the license).
- - -
--allow-dirty
-
Allow working directories with uncommitted VCS changes to be packaged.
- - -
- -### Package Selection - -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -`--manifest-path` is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. - -The default members of a workspace can be set explicitly with the -`workspace.default-members` key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -`--workspace`), and a non-virtual workspace will include only the root crate itself. - -
- -
-p spec...
-
--package spec...
-
Package only the specified packages. See cargo-pkgid(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like *, ? and []. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern.
- - -
--workspace
-
Package all members in the workspace.
- - - - -
--exclude SPEC...
-
Exclude the specified packages. Must be used in conjunction with the ---workspace flag. This flag may be specified multiple times and supports -common Unix glob patterns like *, ? and []. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern.
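A hypothetical workspace-wide packaging run that skips some members (the glob is a placeholder; note the quoting recommended above):

    cargo package --workspace --exclude 'internal-*'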
- - -
- - -### Compilation Options - -
- -
--target triple
-
Package for the given architecture. The default is the host -architecture. The general format of the triple is -<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a -list of supported targets.

-

This may also be specified with the build.target -config value.

-

Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -build cache documentation for more details.

- - - -
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - - -
- -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Manifest Options - -
- -
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - - -
- -### Miscellaneous Options - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Create a compressed `.crate` file of the current package: - - cargo package - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-publish(1)](cargo-publish.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-pkgid.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-pkgid.md deleted file mode 100644 index 0009f5343..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-pkgid.md +++ /dev/null @@ -1,173 +0,0 @@ -# cargo-pkgid(1) - -## NAME - -cargo-pkgid - Print a fully qualified package specification - -## SYNOPSIS - -`cargo pkgid` [_options_] [_spec_] - -## DESCRIPTION - -Given a _spec_ argument, print out the fully qualified package ID specifier -for a package or dependency in the current workspace. This command will -generate an error if _spec_ is ambiguous as to which package it refers to in -the dependency graph. If no _spec_ is given, then the specifier for the local -package is printed. - -This command requires that a lockfile is available and dependencies have been -fetched. - -A package specifier consists of a name, version, and source URL. You are -allowed to use partial specifiers to succinctly match a specific package as -long as it matches only one package. The format of a _spec_ can be one of the -following: - -SPEC Structure | Example SPEC ----------------------------|-------------- -_name_ | `bitflags` -_name_`:`_version_ | `bitflags:1.0.4` -_url_ | `https://github.com/rust-lang/cargo` -_url_`#`_version_ | `https://github.com/rust-lang/cargo#0.33.0` -_url_`#`_name_ | `https://github.com/rust-lang/crates.io-index#bitflags` -_url_`#`_name_`:`_version_ | `https://github.com/rust-lang/cargo#crates-io:0.21.0` - -## OPTIONS - -### Package Selection - -
- -
-p spec
-
--package spec
-
Get the package ID for the given package instead of the current package.
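For example, assuming bitflags from the SPEC table above is a dependency of the current workspace:

    cargo pkgid -p bitflags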
- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

-
    -
  • auto (default): Automatically detect if color support is available on the -terminal.
  • -
  • always: Always display colors.
  • -
  • never: Never display colors.
  • -
-

May also be specified with the term.color -config value.

- - -
- -### Manifest Options - -
- -
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is -up-to-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The --frozen flag also prevents Cargo from -attempting to access the network to determine if it is out-of-date.

-

These may be used in environments where you want to assert that the -Cargo.lock file is up-to-date (such as a CI build) or want to avoid network -access.

- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible.

-

Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the cargo-fetch(1) command to download dependencies before going -offline.

-

May also be specified with the net.offline config value.

- - - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Retrieve package specification for `foo` package: - - cargo pkgid foo - -2. Retrieve package specification for version 1.0.0 of `foo`: - - cargo pkgid foo:1.0.0 - -3. Retrieve package specification for `foo` from crates.io: - - cargo pkgid https://github.com/rust-lang/crates.io-index#foo - -4. Retrieve package specification for `foo` from a local package: - - cargo pkgid file:///path/to/local/package#foo - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-generate-lockfile(1)](cargo-generate-lockfile.html), [cargo-metadata(1)](cargo-metadata.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-publish.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-publish.md deleted file mode 100644 index f35e30ef7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-publish.md +++ /dev/null @@ -1,274 +0,0 @@ -# cargo-publish(1) - - -## NAME - -cargo-publish - Upload a package to the registry - -## SYNOPSIS - -`cargo publish` [_options_] - -## DESCRIPTION - -This command will create a distributable, compressed `.crate` file with the -source code of the package in the current directory and upload it to a -registry. The default registry is . This performs the -following steps: - -1. Performs a few checks, including: - - Checks the `package.publish` key in the manifest for restrictions on - which registries you are allowed to publish to. -2. Create a `.crate` file by following the steps in [cargo-package(1)](cargo-package.html). -3. Upload the crate to the registry. Note that the server will perform - additional checks on the crate. - -This command requires you to be authenticated with either the `--token` option -or using [cargo-login(1)](cargo-login.html). - -See [the reference](../reference/publishing.html) for more details about -packaging and publishing. - -## OPTIONS - -### Publish Options - -
- -
--dry-run
-
Perform all checks without uploading.
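For example, a typical pre-release check that packages and verifies the crate without uploading it:

    cargo publish --dry-run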
- - -
--token token
-
API token to use when authenticating. This overrides the token stored in -the credentials file (which is created by cargo-login(1)).

-

Cargo config environment variables can be -used to override the tokens stored in the credentials file. The token for -crates.io may be specified with the CARGO_REGISTRY_TOKEN environment -variable. Tokens for other registries may be specified with environment -variables of the form CARGO_REGISTRIES_NAME_TOKEN where NAME is the name -of the registry in all capital letters.

- - - -
--no-verify
-
Don't verify the contents by building them.
- - -
--allow-dirty
-
Allow working directories with uncommitted VCS changes to be packaged.
- - -
--index index
-
The URL of the registry index to use.
- - - -
--registry registry
-
Name of the registry to publish to. Registry names are defined in Cargo -config files. If not specified, and there is a -package.publish field in -Cargo.toml with a single registry, then it will publish to that registry. -Otherwise it will use the default registry, which is defined by the -registry.default config key -which defaults to crates-io.
- - -
- -### Package Selection - -By default, the package in the current working directory is selected. The `-p` -flag can be used to choose a different package in a workspace. - -
- -
-p spec
-
--package spec
-
The package to publish. See cargo-pkgid(1) for the SPEC -format.
- - -
- - -### Compilation Options - -
- -
--target triple
-
Publish for the given architecture. The default is the host -architecture. The general format of the triple is -<arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a -list of supported targets.

-

This may also be specified with the build.target -config value.

-

Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -build cache documentation for more details.

- - - -
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - - -
- -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Manifest Options - -
- -
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.
- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.
- - - -
- -### Miscellaneous Options - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.
- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Publish the current package: - - cargo publish - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-package(1)](cargo-package.html), [cargo-login(1)](cargo-login.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-report.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-report.md deleted file mode 100644 index 5df2303e6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-report.md +++ /dev/null @@ -1,43 +0,0 @@ -# cargo-report(1) - -## NAME - -cargo-report - Generate and display various kinds of reports - -## SYNOPSIS - -`cargo report` _type_ [_options_] - -### DESCRIPTION - -Displays a report of the given _type_ - currently, only `future-incompat` is supported - -## OPTIONS - -
- -
--id id
-
Show the report with the specified Cargo-generated id
- - -
-p spec...
-
--package spec...
-
Only display a report for the specified package
- - -
- -## EXAMPLES - -1. Display the latest future-incompat report: - - cargo report future-incompat - -2. Display the latest future-incompat report for a specific package: - - cargo report future-incompat --package my-dep:0.0.1 - -## SEE ALSO -[Future incompat report](../reference/future-incompat-report.html) - -[cargo(1)](cargo.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-run.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-run.md deleted file mode 100644 index 5dbc06634..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-run.md +++ /dev/null @@ -1,289 +0,0 @@ -# cargo-run(1) - - -## NAME - -cargo-run - Run the current package - -## SYNOPSIS - -`cargo run` [_options_] [`--` _args_] - -## DESCRIPTION - -Run a binary or example of the local package. - -All the arguments following the two dashes (`--`) are passed to the binary to -run. If you're passing arguments to both Cargo and the binary, the ones after -`--` go to the binary, the ones before go to Cargo. - -## OPTIONS - -### Package Selection - -By default, the package in the current working directory is selected. The `-p` -flag can be used to choose a different package in a workspace. - -
- -
-p spec
-
--package spec
-
The package to run. See cargo-pkgid(1) for the SPEC -format.
- - -
- - -### Target Selection - -When no target selection options are given, `cargo run` will run the binary -target. If there are multiple binary targets, you must pass a target flag to -choose one. Or, the `default-run` field may be specified in the `[package]` -section of `Cargo.toml` to choose the name of the binary to run by default. - -
- -
--bin name
-
Run the specified binary.
- - -
--example name
-
Run the specified example.
- - -
- -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Compilation Options - -
- -
--target triple
-
Run for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.
- - - -
-r
-
--release
-
Run optimized artifacts with the release profile. -See also the --profile option for choosing a specific profile by name.
- - - -
--profile name
-
Run with the given profile. See the reference for more details on profiles.
- - - -
--ignore-rust-version
-
Run the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's rust-version field.
- - - -
- -### Output Options - -
-
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - -
- -### Display Options - -
- -
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.
- - - -
--message-format fmt
-
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
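For example (an illustrative invocation, not from the original page), a tool that wants a single machine-readable stream can ask Cargo to render compiler diagnostics itself:

    cargo run --message-format json-render-diagnostics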
- - - -
- -### Manifest Options - -
- -
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.
- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.
- - - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -### Miscellaneous Options - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
- -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Build the local package and run its main target (assuming only one binary): - - cargo run - -2. Run an example with extra arguments: - - cargo run --example exname -- --exoption exarg1 exarg2 - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-rustc.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-rustc.md deleted file mode 100644 index 28879dfee..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-rustc.md +++ /dev/null @@ -1,375 +0,0 @@ -# cargo-rustc(1) - - -## NAME - -cargo-rustc - Compile the current package, and pass extra options to the compiler - -## SYNOPSIS - -`cargo rustc` [_options_] [`--` _args_] - -## DESCRIPTION - -The specified target for the current package (or package specified by `-p` if -provided) will be compiled along with all of its dependencies. The specified -_args_ will all be passed to the final compiler invocation, not any of the -dependencies. Note that the compiler will still unconditionally receive -arguments such as `-L`, `--extern`, and `--crate-type`, and the specified -_args_ will simply be added to the compiler invocation. - -See for documentation on rustc -flags. - -This command requires that only one target is being compiled when additional -arguments are provided. If more than one target is available for the current -package the filters of `--lib`, `--bin`, etc, must be used to select which -target is compiled. - -To pass flags to all compiler processes spawned by Cargo, use the `RUSTFLAGS` -[environment variable](../reference/environment-variables.html) or the -`build.rustflags` [config value](../reference/config.html). - -## OPTIONS - -### Package Selection - -By default, the package in the current working directory is selected. The `-p` -flag can be used to choose a different package in a workspace. - -
- -
-p spec
-
--package spec
-
The package to build. See cargo-pkgid(1) for the SPEC -format.
- - -
- - -### Target Selection - -When no target selection options are given, `cargo rustc` will build all -binary and library targets of the selected package. - -Passing target selection flags will build only the specified -targets. - -Note that `--bin`, `--example`, `--test` and `--bench` flags also -support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. - -
- -
--lib
-
Build the package's library.
- - -
--bin name...
-
Build the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--bins
-
Build all binary targets.
- - - -
--example name...
-
Build the specified example. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--examples
-
Build all example targets.
- - -
--test name...
-
Build the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns.
- - -
--tests
-
Build all targets in test mode that have the test = true manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the test flag in the -manifest settings for the target.
- - -
--bench name...
-
Build the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns.
- - -
--benches
-
Build all targets in benchmark mode that have the bench = true -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the bench flag in the -manifest settings for the target.
- - -
--all-targets
-
Build all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
- - -
- - -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Compilation Options - -
- -
--target triple
-
Build for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.
- - - -
-r
-
--release
-
Build optimized artifacts with the release profile. -See also the --profile option for choosing a specific profile by name.
- - - -
--profile name
-
Build with the given profile.

The rustc subcommand will treat the following named profiles with special behaviors:

  • check — Builds in the same way as the cargo-check(1) command with the dev profile.
  • test — Builds in the same way as the cargo-test(1) command, enabling building in test mode which will enable tests and enable the test cfg option. See rustc tests for more detail.
  • bench — Builds in the same way as the cargo-bench(1) command, similar to the test profile.

See the reference for more details on profiles.
- - -
--ignore-rust-version
-
Build the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's rust-version field.
- - - -
- -### Output Options - -
-
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - -
- -### Display Options - -
- -
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.
- - - -
--message-format fmt
-
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
- - - -
- -### Manifest Options - -
- -
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.
- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.
- - - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -### Miscellaneous Options - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
--future-incompat-report
-
Displays a future-incompat report for any future-incompatible warnings produced during execution of this command.

See cargo-report(1).
- - -
- -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Check if your package (not including dependencies) uses unsafe code: - - cargo rustc --lib -- -D unsafe-code - -2. Try an experimental flag on the nightly compiler, such as this which prints - the size of every type: - - cargo rustc --lib -- -Z print-type-sizes - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-build(1)](cargo-build.html), [rustc(1)](https://doc.rust-lang.org/rustc/index.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-rustdoc.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-rustdoc.md deleted file mode 100644 index 2bc43ed44..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-rustdoc.md +++ /dev/null @@ -1,365 +0,0 @@ -# cargo-rustdoc(1) - - -## NAME - -cargo-rustdoc - Build a package's documentation, using specified custom flags - -## SYNOPSIS - -`cargo rustdoc` [_options_] [`--` _args_] - -## DESCRIPTION - -The specified target for the current package (or package specified by `-p` if -provided) will be documented with the specified _args_ being passed to the -final rustdoc invocation. Dependencies will not be documented as part of this -command. Note that rustdoc will still unconditionally receive arguments such -as `-L`, `--extern`, and `--crate-type`, and the specified _args_ will simply -be added to the rustdoc invocation. - -See for documentation on rustdoc -flags. - -This command requires that only one target is being compiled when additional -arguments are provided. If more than one target is available for the current -package the filters of `--lib`, `--bin`, etc, must be used to select which -target is compiled. - -To pass flags to all rustdoc processes spawned by Cargo, use the -`RUSTDOCFLAGS` [environment variable](../reference/environment-variables.html) -or the `build.rustdocflags` [config value](../reference/config.html). - -## OPTIONS - -### Documentation Options - -
- -
--open
-
Open the docs in a browser after building them. This will use your default -browser unless you define another one in the BROWSER environment variable -or use the doc.browser configuration -option.
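For example (the rustdoc flag shown here is only an illustration), private items can be included in the generated documentation and the result opened directly:

    cargo rustdoc --open -- --document-private-items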
- - -
- -### Package Selection - -By default, the package in the current working directory is selected. The `-p` -flag can be used to choose a different package in a workspace. - -
- -
-p spec
-
--package spec
-
The package to document. See cargo-pkgid(1) for the SPEC -format.
- - -
- - -### Target Selection - -When no target selection options are given, `cargo rustdoc` will document all -binary and library targets of the selected package. The binary will be skipped -if its name is the same as the lib target. Binaries are skipped if they have -`required-features` that are missing. - -Passing target selection flags will document only the specified -targets. - -Note that `--bin`, `--example`, `--test` and `--bench` flags also -support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. - -
- -
--lib
-
Document the package's library.
- - -
--bin name...
-
Document the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--bins
-
Document all binary targets.
- - - -
--example name...
-
Document the specified example. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--examples
-
Document all example targets.
- - -
--test name...
-
Document the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns.
- - -
--tests
-
Document all targets in test mode that have the test = true manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the test flag in the -manifest settings for the target.
- - -
--bench name...
-
Document the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns.
- - -
--benches
-
Document all targets in benchmark mode that have the bench = true -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the bench flag in the -manifest settings for the target.
- - -
--all-targets
-
Document all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
- - -
- - -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Compilation Options - -
- -
--target triple
-
Document for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.
- - - -
-r
-
--release
-
Document optimized artifacts with the release profile. -See also the --profile option for choosing a specific profile by name.
- - - -
--profile name
-
Document with the given profile. See the reference for more details on profiles.
- - - -
--ignore-rust-version
-
Document the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's rust-version field.
- - - -
- -### Output Options - -
-
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.
- - - -
--message-format fmt
-
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
- - -
- -### Manifest Options - -
-
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.
- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.
- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -### Miscellaneous Options - -
-
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
- -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Build documentation with custom CSS included from a given file: - - cargo rustdoc --lib -- --extend-css extra.css - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-doc(1)](cargo-doc.html), [rustdoc(1)](https://doc.rust-lang.org/rustdoc/index.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-search.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-search.md deleted file mode 100644 index 5a117683b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-search.md +++ /dev/null @@ -1,117 +0,0 @@ -# cargo-search(1) - -## NAME - -cargo-search - Search packages in crates.io - -## SYNOPSIS - -`cargo search` [_options_] [_query_...] - -## DESCRIPTION - -This performs a textual search for crates on . The matching -crates will be displayed along with their description in TOML format suitable -for copying into a `Cargo.toml` manifest. - -## OPTIONS - -### Search Options - -
- -
--limit limit
-
Limit the number of results (default: 10, max: 100).
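For example (the query string is arbitrary), to widen the result list beyond the default of 10:

    cargo search --limit 30 serde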
- - -
--index index
-
The URL of the registry index to use.
- - - -
--registry registry
-
Name of the registry to use. Registry names are defined in Cargo config -files. If not specified, the default registry is used, -which is defined by the registry.default config key which defaults to -crates-io.
- - - -
- -### Display Options - -
-
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.
- - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Search for a package from crates.io: - - cargo search serde - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-install(1)](cargo-install.html), [cargo-publish(1)](cargo-publish.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-test.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-test.md deleted file mode 100644 index 4b28129ec..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-test.md +++ /dev/null @@ -1,476 +0,0 @@ -# cargo-test(1) - - - -## NAME - -cargo-test - Execute unit and integration tests of a package - -## SYNOPSIS - -`cargo test` [_options_] [_testname_] [`--` _test-options_] - -## DESCRIPTION - -Compile and execute unit and integration tests. - -The test filtering argument `TESTNAME` and all the arguments following the two -dashes (`--`) are passed to the test binaries and thus to _libtest_ (rustc's -built in unit-test and micro-benchmarking framework). If you're passing -arguments to both Cargo and the binary, the ones after `--` go to the binary, -the ones before go to Cargo. For details about libtest's arguments see the -output of `cargo test -- --help` and check out the rustc book's chapter on -how tests work at . - -As an example, this will filter for tests with `foo` in their name and run them -on 3 threads in parallel: - - cargo test foo -- --test-threads 3 - -Tests are built with the `--test` option to `rustc` which creates an -executable with a `main` function that automatically runs all functions -annotated with the `#[test]` attribute in multiple threads. `#[bench]` -annotated functions will also be run with one iteration to verify that they -are functional. - -The libtest harness may be disabled by setting `harness = false` in the target -manifest settings, in which case your code will need to provide its own `main` -function to handle running tests. - -Documentation tests are also run by default, which is handled by `rustdoc`. It -extracts code samples from documentation comments and executes them. See the -[rustdoc book](https://doc.rust-lang.org/rustdoc/) for more information on -writing doc tests. - -## OPTIONS - -### Test Options - -
- -
--no-run
-
Compile, but don't run tests.
- - -
--no-fail-fast
-
Run all tests regardless of failure. Without this flag, Cargo will exit -after the first executable fails. The Rust test harness will run all tests -within the executable to completion, this flag only applies to the executable -as a whole.
- - -
- - -### Package Selection - -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -`--manifest-path` is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. - -The default members of a workspace can be set explicitly with the -`workspace.default-members` key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -`--workspace`), and a non-virtual workspace will include only the root crate itself. - -
- -
-p spec...
-
--package spec...
-
Test only the specified packages. See cargo-pkgid(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like *, ? and []. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern.
- - -
--workspace
-
Test all members in the workspace.
- - - -
--all
-
Deprecated alias for --workspace.
- - - -
--exclude SPEC...
-
Exclude the specified packages. Must be used in conjunction with the ---workspace flag. This flag may be specified multiple times and supports -common Unix glob patterns like *, ? and []. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern.
- - -
- - -### Target Selection - -When no target selection options are given, `cargo test` will build the -following targets of the selected packages: - -- lib โ€” used to link with binaries, examples, integration tests, and doc tests -- bins (only if integration tests are built and required features are - available) -- examples โ€” to ensure they compile -- lib as a unit test -- bins as unit tests -- integration tests -- doc tests for the lib target - -The default behavior can be changed by setting the `test` flag for the target -in the manifest settings. Setting examples to `test = true` will build and run -the example as a test. Setting targets to `test = false` will stop them from -being tested by default. Target selection options that take a target by name -ignore the `test` flag and will always test the given target. - -Doc tests for libraries may be disabled by setting `doctest = false` for the -library in the manifest. - -Binary targets are automatically built if there is an integration test or -benchmark. This allows an integration test to execute the binary to exercise -and test its behavior. The `CARGO_BIN_EXE_` -[environment variable](../reference/environment-variables.html#environment-variables-cargo-sets-for-crates) -is set when the integration test is built so that it can use the -[`env` macro](https://doc.rust-lang.org/std/macro.env.html) to locate the -executable. - -Passing target selection flags will test only the specified -targets. - -Note that `--bin`, `--example`, `--test` and `--bench` flags also -support common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. - -
- -
--lib
-
Test the package's library.
- - -
--bin name...
-
Test the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--bins
-
Test all binary targets.
- - - -
--example name...
-
Test the specified example. This flag may be specified multiple times -and supports common Unix glob patterns.
- - -
--examples
-
Test all example targets.
- - -
--test name...
-
Test the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns.
- - -
--tests
-
Test all targets in test mode that have the test = true manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the test flag in the -manifest settings for the target.
- - -
--bench name...
-
Test the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns.
- - -
--benches
-
Test all targets in benchmark mode that have the bench = true -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the bench flag in the -manifest settings for the target.
- - -
--all-targets
-
Test all targets. This is equivalent to specifying --lib --bins --tests --benches --examples.
- - -
- - -
- -
--doc
-
Test only the library's documentation. This cannot be mixed with other -target options.
- - -
- -### Feature Selection - -The feature flags allow you to control which features are enabled. When no -feature options are given, the `default` feature is activated for every -selected package. - -See [the features documentation](../reference/features.html#command-line-feature-options) -for more details. - -
- -
--features features
-
Space or comma separated list of features to activate. Features of workspace -members may be enabled with package-name/feature-name syntax. This flag may -be specified multiple times, which enables all specified features.
- - -
--all-features
-
Activate all available features of all selected packages.
- - -
--no-default-features
-
Do not activate the default feature of the selected packages.
- - -
- - -### Compilation Options - -
- -
--target triple
-
Test for the given architecture. The default is the host architecture. The general format of the triple is <arch><sub>-<vendor>-<sys>-<abi>. Run rustc --print target-list for a list of supported targets.

This may also be specified with the build.target config value.

Note that specifying this flag makes Cargo run in a different mode where the target artifacts are placed in a separate directory. See the build cache documentation for more details.
- - - -
-r
-
--release
-
Test optimized artifacts with the release profile. -See also the --profile option for choosing a specific profile by name.
- - - -
--profile name
-
Test with the given profile. See the reference for more details on profiles.
- - - -
--ignore-rust-version
-
Test the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's rust-version field.
- - - -
- -### Output Options - -
-
--target-dir directory
-
Directory for all generated artifacts and intermediate files. May also be -specified with the CARGO_TARGET_DIR environment variable, or the -build.target-dir config value. -Defaults to target in the root of the workspace.
- - -
- -### Display Options - -By default the Rust test harness hides output from test execution to keep -results readable. Test output can be recovered (e.g., for debugging) by passing -`--nocapture` to the test binaries: - - cargo test -- --nocapture - -
- -
-v
-
--verbose
-
Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the term.verbose -config value.
- - -
-q
-
--quiet
-
Do not print cargo log messages. -May also be specified with the term.quiet -config value.
- - -
--color when
-
Control when colored output is used. Valid values:

  • auto (default): Automatically detect if color support is available on the terminal.
  • always: Always display colors.
  • never: Never display colors.

May also be specified with the term.color config value.
- - - -
--message-format fmt
-
The output format for diagnostic messages. Can be specified multiple times and consists of comma-separated values. Valid values:

  • human (default): Display in a human-readable text format. Conflicts with short and json.
  • short: Emit shorter, human-readable text messages. Conflicts with human and json.
  • json: Emit JSON messages to stdout. See the reference for more details. Conflicts with human and short.
  • json-diagnostic-short: Ensure the rendered field of JSON messages contains the "short" rendering from rustc. Cannot be used with human or short.
  • json-diagnostic-rendered-ansi: Ensure the rendered field of JSON messages contains embedded ANSI color codes for respecting rustc's default color scheme. Cannot be used with human or short.
  • json-render-diagnostics: Instruct Cargo to not include rustc diagnostics in JSON messages printed, but instead Cargo itself should render the JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others coming from rustc are still emitted. Cannot be used with human or short.
- - - -
- -### Manifest Options - -
- -
--manifest-path path
-
Path to the Cargo.toml file. By default, Cargo searches for the -Cargo.toml file in the current directory or any parent directory.
- - - -
--frozen
-
--locked
-
Either of these flags requires that the Cargo.lock file is up-to-date. If the lock file is missing, or it needs to be updated, Cargo will exit with an error. The --frozen flag also prevents Cargo from attempting to access the network to determine if it is out-of-date.

These may be used in environments where you want to assert that the Cargo.lock file is up-to-date (such as a CI build) or want to avoid network access.
- - -
--offline
-
Prevents Cargo from accessing the network for any reason. Without this flag, Cargo will stop with an error if it needs to access the network and the network is not available. With this flag, Cargo will attempt to proceed without the network if possible.

Beware that this may result in different dependency resolution than online mode. Cargo will restrict itself to crates that are downloaded locally, even if there might be a newer version as indicated in the local copy of the index. See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the net.offline config value.
- - - -
- -### Common Options - -
- -
+toolchain
-
If Cargo has been installed with rustup, and the first argument to cargo -begins with +, it will be interpreted as a rustup toolchain name (such -as +stable or +nightly). -See the rustup documentation -for more information about how toolchain overrides work.
- - -
-h
-
--help
-
Prints help information.
- - -
-Z flag
-
Unstable (nightly-only) flags to Cargo. Run cargo -Z help for details.
- - -
- - -### Miscellaneous Options - -The `--jobs` argument affects the building of the test executable but does not -affect how many threads are used when running the tests. The Rust test harness -includes an option to control the number of threads used: - - cargo test -j 2 -- --test-threads=2 - -
- -
-j N
-
--jobs N
-
Number of parallel jobs to run. May also be specified with the -build.jobs config value. Defaults to -the number of CPUs.
- - -
--future-incompat-report
-
Displays a future-incompat report for any future-incompatible warnings produced during execution of this command.

See cargo-report(1).
- - - -
- -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Execute all the unit and integration tests of the current package: - - cargo test - -2. Run only tests whose names match against a filter string: - - cargo test name_filter - -3. Run only a specific test within a specific integration test: - - cargo test --test int_test_name -- modname::test_name - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-bench(1)](cargo-bench.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-tree.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-tree.md deleted file mode 100644 index b1880b456..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-tree.md +++ /dev/null @@ -1,401 +0,0 @@ -# cargo-tree(1) - - - -## NAME - -cargo-tree - Display a tree visualization of a dependency graph - -## SYNOPSIS - -`cargo tree` [_options_] - -## DESCRIPTION - -This command will display a tree of dependencies to the terminal. An example -of a simple project that depends on the "rand" package: - -``` -myproject v0.1.0 (/myproject) -โ””โ”€โ”€ rand v0.7.3 - โ”œโ”€โ”€ getrandom v0.1.14 - โ”‚ โ”œโ”€โ”€ cfg-if v0.1.10 - โ”‚ โ””โ”€โ”€ libc v0.2.68 - โ”œโ”€โ”€ libc v0.2.68 (*) - โ”œโ”€โ”€ rand_chacha v0.2.2 - โ”‚ โ”œโ”€โ”€ ppv-lite86 v0.2.6 - โ”‚ โ””โ”€โ”€ rand_core v0.5.1 - โ”‚ โ””โ”€โ”€ getrandom v0.1.14 (*) - โ””โ”€โ”€ rand_core v0.5.1 (*) -[build-dependencies] -โ””โ”€โ”€ cc v1.0.50 -``` - -Packages marked with `(*)` have been "de-duplicated". The dependencies for the -package have already been shown elsewhere in the graph, and so are not -repeated. Use the `--no-dedupe` option to repeat the duplicates. - -The `-e` flag can be used to select the dependency kinds to display. The -"features" kind changes the output to display the features enabled by -each dependency. For example, `cargo tree -e features`: - -``` -myproject v0.1.0 (/myproject) -โ””โ”€โ”€ log feature "serde" - โ””โ”€โ”€ log v0.4.8 - โ”œโ”€โ”€ serde v1.0.106 - โ””โ”€โ”€ cfg-if feature "default" - โ””โ”€โ”€ cfg-if v0.1.10 -``` - -In this tree, `myproject` depends on `log` with the `serde` feature. `log` in -turn depends on `cfg-if` with "default" features. When using `-e features` it -can be helpful to use `-i` flag to show how the features flow into a package. -See the examples below for more detail. - -## OPTIONS - -### Tree Options - -
- -
-i spec
-
--invert spec
-
Show the reverse dependencies for the given package. This flag will invert the tree and display the packages that depend on the given package.

Note that in a workspace, by default it will only display the package's reverse dependencies inside the tree of the workspace member in the current directory. The --workspace flag can be used to extend it so that it will show the package's reverse dependencies across the entire workspace. The -p flag can be used to display the package's reverse dependencies only with the subtree of the package given to -p.
- - -
--prune spec
-
Prune the given package from the display of the dependency tree.
- - -
--depth depth
-
Maximum display depth of the dependency tree. A depth of 1 displays the direct -dependencies, for example.
- - -
--no-dedupe
-
Do not de-duplicate repeated dependencies. Usually, when a package has already -displayed its dependencies, further occurrences will not re-display its -dependencies, and will include a (*) to indicate it has already been shown. -This flag will cause those duplicates to be repeated.
- - -
-d
-
--duplicates
-
Show only dependencies which come in multiple versions (implies --invert). When used with the -p flag, only shows duplicates within the subtree of the given package.

It can be beneficial for build times and executable sizes to avoid building that same package multiple times. This flag can help identify the offending packages. You can then investigate if the package that depends on the duplicate with the older version can be updated to the newer version so that only one instance is built.
- - -
-e kinds
-
--edges kinds
-
The dependency kinds to display. Takes a comma separated list of values:

  • all — Show all edge kinds.
  • normal — Show normal dependencies.
  • build — Show build dependencies.
  • dev — Show development dependencies.
  • features — Show features enabled by each dependency. If this is the only kind given, then it will automatically include the other dependency kinds.
  • no-normal — Do not include normal dependencies.
  • no-build — Do not include build dependencies.
  • no-dev — Do not include development dependencies.
  • no-proc-macro — Do not include procedural macro dependencies.

The normal, build, dev, and all dependency kinds cannot be mixed with no-normal, no-build, or no-dev dependency kinds.

The default is normal,build,dev.
- - -
--target triple
-
Filter dependencies matching the given target-triple. The default is the host -platform. Use the value all to include all targets.
- - -
- -### Tree Formatting Options - -
- -
--charset charset
-
Chooses the character set to use for the tree. Valid values are "utf8" or -"ascii". Default is "utf8".
- - -
-f format
-
--format format
-
Set the format string for each package. The default is "{p}".

This is an arbitrary string which will be used to display each package. The following strings will be replaced with the corresponding value:

  • {p} — The package name.
  • {l} — The package license.
  • {r} — The package repository URL.
  • {f} — Comma-separated list of package features that are enabled.
  • {lib} — The name, as used in a use statement, of the package's library.
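For example (a sketch), to print each package together with the features it has enabled:

    cargo tree --format "{p} {f}"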
- - -
--prefix prefix
-
Sets how each line is displayed. The prefix value can be one of:

  • indent (default) — Shows each line indented as a tree.
  • depth — Show as a list, with the numeric depth printed before each entry.
  • none — Show as a flat list.
- - -
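A rough sketch of how the formatting options combine; the exact output depends on the project being inspected:

```console
# Print each package with its enabled features, as a flat list
$ cargo tree --prefix none --format "{p} {f}"

# Use plain ASCII branch characters, e.g. for logs that cannot render UTF-8
$ cargo tree --charset ascii
```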
### Package Selection

By default, when no package selection options are given, the packages selected
depend on the selected manifest file (based on the current working directory if
`--manifest-path` is not given). If the manifest is the root of a workspace then
the workspace's default members are selected, otherwise only the package defined
by the manifest will be selected.

The default members of a workspace can be set explicitly with the
`workspace.default-members` key in the root manifest. If this is not set, a
virtual workspace will include all workspace members (equivalent to passing
`--workspace`), and a non-virtual workspace will include only the root crate itself.

`-p` _spec_..., `--package` _spec_...

Display only the specified packages. See cargo-pkgid(1) for the SPEC format.
This flag may be specified multiple times and supports common Unix glob
patterns like `*`, `?` and `[]`. However, to avoid your shell accidentally
expanding glob patterns before Cargo handles them, you must use single quotes
or double quotes around each pattern.

`--workspace`

Display all members in the workspace.

`--exclude` _SPEC_...

Exclude the specified packages. Must be used in conjunction with the
`--workspace` flag. This flag may be specified multiple times and supports
common Unix glob patterns like `*`, `?` and `[]`. However, to avoid your shell
accidentally expanding glob patterns before Cargo handles them, you must use
single quotes or double quotes around each pattern.
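For example, selecting workspace members by glob might look like the following; the package-name patterns are placeholders:

```console
# Show the trees of every member whose name starts with "api-"
$ cargo tree -p 'api-*'

# Show the whole workspace except the benchmark crates
$ cargo tree --workspace --exclude 'bench-*'
```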
### Manifest Options

`--manifest-path` _path_

Path to the `Cargo.toml` file. By default, Cargo searches for the
`Cargo.toml` file in the current directory or any parent directory.

`--frozen`, `--locked`

Either of these flags requires that the `Cargo.lock` file is up-to-date. If
the lock file is missing, or it needs to be updated, Cargo will exit with an
error. The `--frozen` flag also prevents Cargo from attempting to access the
network to determine if it is out-of-date.

These may be used in environments where you want to assert that the
`Cargo.lock` file is up-to-date (such as a CI build) or want to avoid network
access.

`--offline`

Prevents Cargo from accessing the network for any reason. Without this flag,
Cargo will stop with an error if it needs to access the network and the
network is not available. With this flag, Cargo will attempt to proceed
without the network if possible.

Beware that this may result in different dependency resolution than online
mode. Cargo will restrict itself to crates that are downloaded locally, even
if there might be a newer version as indicated in the local copy of the index.
See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the `net.offline` config value.
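A common pattern, sketched here, is to populate the local cache while online and then run the command with network access disabled:

```console
# While online: download everything the lock file needs
$ cargo fetch

# Later, without network access: the tree is computed from the local cache
$ cargo tree --offline --locked
```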
### Feature Selection

The feature flags allow you to control which features are enabled. When no
feature options are given, the `default` feature is activated for every
selected package.

See [the features documentation](../reference/features.html#command-line-feature-options)
for more details.

`--features` _features_

Space or comma separated list of features to activate. Features of workspace
members may be enabled with `package-name/feature-name` syntax. This flag may
be specified multiple times, which enables all specified features.

`--all-features`

Activate all available features of all selected packages.

`--no-default-features`

Do not activate the `default` feature of the selected packages.
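As an illustration, assuming a hypothetical workspace member `server` exposing a `tls` feature, the effect of the feature flags can be compared like this:

```console
# Tree with only the explicitly requested feature, defaults disabled
$ cargo tree --no-default-features --features server/tls

# Tree with every available feature turned on
$ cargo tree --all-features
```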
### Display Options

`-v`, `--verbose`

Use verbose output. May be specified twice for "very verbose" output which
includes extra output such as dependency warnings and build script output.
May also be specified with the `term.verbose` config value.

`-q`, `--quiet`

Do not print cargo log messages. May also be specified with the `term.quiet`
config value.

`--color` _when_

Control when colored output is used. Valid values:

- `auto` (default): Automatically detect if color support is available on the terminal.
- `always`: Always display colors.
- `never`: Never display colors.

May also be specified with the `term.color` config value.
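As a small illustration, the flag and its configuration counterpart below both disable colored output; `CARGO_TERM_COLOR` is the environment-variable spelling of the `term.color` config value:

```console
# One-off, via the command-line flag
$ cargo tree --color never

# Via the config environment variable for this invocation
$ CARGO_TERM_COLOR=never cargo tree
```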
### Common Options

`+toolchain`

If Cargo has been installed with rustup, and the first argument to `cargo`
begins with `+`, it will be interpreted as a rustup toolchain name (such as
`+stable` or `+nightly`). See the rustup documentation for more information
about how toolchain overrides work.

`-h`, `--help`

Prints help information.

`-Z` _flag_

Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for details.
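For instance, to run the command on a specific toolchain, or to list the unstable flags accepted by `-Z`:

```console
# Use the nightly toolchain for a single invocation
$ cargo +nightly tree

# List the unstable -Z flags understood by this Cargo
$ cargo -Z help
```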
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Display the tree for the package in the current directory: - - cargo tree - -2. Display all the packages that depend on the `syn` package: - - cargo tree -i syn - -3. Show the features enabled on each package: - - cargo tree --format "{p} {f}" - -4. Show all packages that are built multiple times. This can happen if multiple - semver-incompatible versions appear in the tree (like 1.0.0 and 2.0.0). - - cargo tree -d - -5. Explain why features are enabled for the `syn` package: - - cargo tree -e features -i syn - - The `-e features` flag is used to show features. The `-i` flag is used to - invert the graph so that it displays the packages that depend on `syn`. An - example of what this would display: - - ``` - syn v1.0.17 - โ”œโ”€โ”€ syn feature "clone-impls" - โ”‚ โ””โ”€โ”€ syn feature "default" - โ”‚ โ””โ”€โ”€ rustversion v1.0.2 - โ”‚ โ””โ”€โ”€ rustversion feature "default" - โ”‚ โ””โ”€โ”€ myproject v0.1.0 (/myproject) - โ”‚ โ””โ”€โ”€ myproject feature "default" (command-line) - โ”œโ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "derive" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "full" - โ”‚ โ””โ”€โ”€ rustversion v1.0.2 (*) - โ”œโ”€โ”€ syn feature "parsing" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "printing" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ”œโ”€โ”€ syn feature "proc-macro" - โ”‚ โ””โ”€โ”€ syn feature "default" (*) - โ””โ”€โ”€ syn feature "quote" - โ”œโ”€โ”€ syn feature "printing" (*) - โ””โ”€โ”€ syn feature "proc-macro" (*) - ``` - - To read this graph, you can follow the chain for each feature from the root - to see why it is included. For example, the "full" feature is added by the - `rustversion` crate which is included from `myproject` (with the default - features), and `myproject` is the package selected on the command-line. All - of the other `syn` features are added by the "default" feature ("quote" is - added by "printing" and "proc-macro", both of which are default features). - - If you're having difficulty cross-referencing the de-duplicated `(*)` - entries, try with the `--no-dedupe` flag to get the full output. - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-metadata(1)](cargo-metadata.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-uninstall.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-uninstall.md deleted file mode 100644 index d4b9f7fe1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-uninstall.md +++ /dev/null @@ -1,127 +0,0 @@ -# cargo-uninstall(1) - -## NAME - -cargo-uninstall - Remove a Rust binary - -## SYNOPSIS - -`cargo uninstall` [_options_] [_spec_...] - -## DESCRIPTION - -This command removes a package installed with [cargo-install(1)](cargo-install.html). The _spec_ -argument is a package ID specification of the package to remove (see -[cargo-pkgid(1)](cargo-pkgid.html)). - -By default all binaries are removed for a crate but the `--bin` and -`--example` flags can be used to only remove particular binaries. 
- -The installation root is determined, in order of precedence: - -- `--root` option -- `CARGO_INSTALL_ROOT` environment variable -- `install.root` Cargo [config value](../reference/config.html) -- `CARGO_HOME` environment variable -- `$HOME/.cargo` - - -## OPTIONS - -### Install Options - -
`-p` _spec_..., `--package` _spec_...

Package to uninstall.

`--bin` _name_...

Only uninstall the binary _name_.

`--root` _dir_

Directory to uninstall packages from.
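A couple of illustrative invocations; the package name, binary name, and root directory below are placeholders:

```console
# Remove every binary installed from the package `my-tool`
$ cargo uninstall my-tool

# Remove only one of its binaries, from a custom installation root
$ cargo uninstall my-tool --bin my-tool-helper --root ~/.local/my-cargo-root
```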
### Display Options

`-v`, `--verbose`

Use verbose output. May be specified twice for "very verbose" output which
includes extra output such as dependency warnings and build script output.
May also be specified with the `term.verbose` config value.

`-q`, `--quiet`

Do not print cargo log messages. May also be specified with the `term.quiet`
config value.

`--color` _when_

Control when colored output is used. Valid values:

- `auto` (default): Automatically detect if color support is available on the terminal.
- `always`: Always display colors.
- `never`: Never display colors.

May also be specified with the `term.color` config value.

### Common Options

`+toolchain`

If Cargo has been installed with rustup, and the first argument to `cargo`
begins with `+`, it will be interpreted as a rustup toolchain name (such as
`+stable` or `+nightly`). See the rustup documentation for more information
about how toolchain overrides work.

`-h`, `--help`

Prints help information.

`-Z` _flag_

Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for details.
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Uninstall a previously installed package. - - cargo uninstall ripgrep - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-install(1)](cargo-install.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-update.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-update.md deleted file mode 100644 index f5bf623d1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-update.md +++ /dev/null @@ -1,180 +0,0 @@ -# cargo-update(1) - -## NAME - -cargo-update - Update dependencies as recorded in the local lock file - -## SYNOPSIS - -`cargo update` [_options_] - -## DESCRIPTION - -This command will update dependencies in the `Cargo.lock` file to the latest -version. If the `Cargo.lock` file does not exist, it will be created with the -latest available versions. - -## OPTIONS - -### Update Options - -
`-p` _spec_..., `--package` _spec_...

Update only the specified packages. This flag may be specified multiple
times. See cargo-pkgid(1) for the SPEC format.

If packages are specified with the `-p` flag, then a conservative update of
the lockfile will be performed. This means that only the dependency specified
by SPEC will be updated. Its transitive dependencies will be updated only if
SPEC cannot be updated without updating dependencies. All other dependencies
will remain locked at their currently recorded versions.

If `-p` is not specified, all dependencies are updated.

`--aggressive`

When used with `-p`, dependencies of _spec_ are forced to update as well.
Cannot be used with `--precise`.

`--precise` _precise_

When used with `-p`, allows you to specify a specific version number to set
the package to. If the package comes from a git repository, this can be a git
revision (such as a SHA hash or tag).

`-w`, `--workspace`

Attempt to update only packages defined in the workspace. Other packages
are updated only if they don't already exist in the lockfile. This
option is useful for updating `Cargo.lock` after you've changed version
numbers in `Cargo.toml`.

`--dry-run`

Displays what would be updated, but doesn't actually write the lockfile.
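To preview a conservative update before applying it, a workable pattern (using `serde` only as an example dependency) is:

```console
# Preview which lock-file entries would change for this one dependency
$ cargo update -p serde --dry-run

# Apply it, allowing serde's own dependencies to move too if needed
$ cargo update -p serde --aggressive
```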
### Display Options

`-v`, `--verbose`

Use verbose output. May be specified twice for "very verbose" output which
includes extra output such as dependency warnings and build script output.
May also be specified with the `term.verbose` config value.

`-q`, `--quiet`

Do not print cargo log messages. May also be specified with the `term.quiet`
config value.

`--color` _when_

Control when colored output is used. Valid values:

- `auto` (default): Automatically detect if color support is available on the terminal.
- `always`: Always display colors.
- `never`: Never display colors.

May also be specified with the `term.color` config value.

### Manifest Options

`--manifest-path` _path_

Path to the `Cargo.toml` file. By default, Cargo searches for the
`Cargo.toml` file in the current directory or any parent directory.

`--frozen`, `--locked`

Either of these flags requires that the `Cargo.lock` file is up-to-date. If
the lock file is missing, or it needs to be updated, Cargo will exit with an
error. The `--frozen` flag also prevents Cargo from attempting to access the
network to determine if it is out-of-date.

These may be used in environments where you want to assert that the
`Cargo.lock` file is up-to-date (such as a CI build) or want to avoid network
access.

`--offline`

Prevents Cargo from accessing the network for any reason. Without this flag,
Cargo will stop with an error if it needs to access the network and the
network is not available. With this flag, Cargo will attempt to proceed
without the network if possible.

Beware that this may result in different dependency resolution than online
mode. Cargo will restrict itself to crates that are downloaded locally, even
if there might be a newer version as indicated in the local copy of the index.
See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the `net.offline` config value.

### Common Options

`+toolchain`

If Cargo has been installed with rustup, and the first argument to `cargo`
begins with `+`, it will be interpreted as a rustup toolchain name (such as
`+stable` or `+nightly`). See the rustup documentation for more information
about how toolchain overrides work.

`-h`, `--help`

Prints help information.

`-Z` _flag_

Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for details.
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Update all dependencies in the lockfile: - - cargo update - -2. Update only specific dependencies: - - cargo update -p foo -p bar - -3. Set a specific dependency to a specific version: - - cargo update -p foo --precise 1.2.3 - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-generate-lockfile(1)](cargo-generate-lockfile.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-vendor.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-vendor.md deleted file mode 100644 index b1c6f858a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-vendor.md +++ /dev/null @@ -1,177 +0,0 @@ -# cargo-vendor(1) - -## NAME - -cargo-vendor - Vendor all dependencies locally - -## SYNOPSIS - -`cargo vendor` [_options_] [_path_] - -## DESCRIPTION - -This cargo subcommand will vendor all crates.io and git dependencies for a -project into the specified directory at ``. After this command completes -the vendor directory specified by `` will contain all remote sources from -dependencies specified. Additional manifests beyond the default one can be -specified with the `-s` option. - -The `cargo vendor` command will also print out the configuration necessary -to use the vendored sources, which you will need to add to `.cargo/config.toml`. - -## OPTIONS - -### Vendor Options - -
`-s` _manifest_, `--sync` _manifest_

Specify an extra `Cargo.toml` manifest of another workspace which should also
be vendored and synced to the output.

`--no-delete`

Don't delete the "vendor" directory when vendoring, but rather keep all
existing contents of the vendor directory.

`--respect-source-config`

Instead of ignoring `[source]` configuration in `.cargo/config.toml` (the
default), read it and use it when downloading crates from crates.io, for
example.

`--versioned-dirs`

Normally versions are only added to disambiguate multiple versions of the
same package. This option causes all directories in the "vendor" directory
to be versioned, which makes it easier to track the history of vendored
packages over time, and can help with the performance of re-vendoring when
only a subset of the packages have changed.
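A sketch of a typical vendoring run; the second workspace path and the output directory are placeholders, and the `[source]` configuration that `cargo vendor` prints on completion still has to be copied into `.cargo/config.toml`:

```console
# Vendor this workspace plus one more into third-party/vendor,
# keeping versioned directory names
$ cargo vendor --versioned-dirs -s ../other-workspace/Cargo.toml third-party/vendor

# cargo vendor ends by printing the [source] replacement configuration;
# add that output to .cargo/config.toml so builds use the vendored sources.
```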
### Manifest Options

`--manifest-path` _path_

Path to the `Cargo.toml` file. By default, Cargo searches for the
`Cargo.toml` file in the current directory or any parent directory.

`--frozen`, `--locked`

Either of these flags requires that the `Cargo.lock` file is up-to-date. If
the lock file is missing, or it needs to be updated, Cargo will exit with an
error. The `--frozen` flag also prevents Cargo from attempting to access the
network to determine if it is out-of-date.

These may be used in environments where you want to assert that the
`Cargo.lock` file is up-to-date (such as a CI build) or want to avoid network
access.

`--offline`

Prevents Cargo from accessing the network for any reason. Without this flag,
Cargo will stop with an error if it needs to access the network and the
network is not available. With this flag, Cargo will attempt to proceed
without the network if possible.

Beware that this may result in different dependency resolution than online
mode. Cargo will restrict itself to crates that are downloaded locally, even
if there might be a newer version as indicated in the local copy of the index.
See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the `net.offline` config value.

### Display Options

`-v`, `--verbose`

Use verbose output. May be specified twice for "very verbose" output which
includes extra output such as dependency warnings and build script output.
May also be specified with the `term.verbose` config value.

`-q`, `--quiet`

Do not print cargo log messages. May also be specified with the `term.quiet`
config value.

`--color` _when_

Control when colored output is used. Valid values:

- `auto` (default): Automatically detect if color support is available on the terminal.
- `always`: Always display colors.
- `never`: Never display colors.

May also be specified with the `term.color` config value.

### Common Options

`+toolchain`

If Cargo has been installed with rustup, and the first argument to `cargo`
begins with `+`, it will be interpreted as a rustup toolchain name (such as
`+stable` or `+nightly`). See the rustup documentation for more information
about how toolchain overrides work.

`-h`, `--help`

Prints help information.

`-Z` _flag_

Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for details.
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Vendor all dependencies into a local "vendor" folder - - cargo vendor - -2. Vendor all dependencies into a local "third-party/vendor" folder - - cargo vendor third-party/vendor - -3. Vendor the current workspace as well as another to "vendor" - - cargo vendor -s ../path/to/Cargo.toml - -## SEE ALSO -[cargo(1)](cargo.html) - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-verify-project.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-verify-project.md deleted file mode 100644 index 55f8885ca..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-verify-project.md +++ /dev/null @@ -1,137 +0,0 @@ -# cargo-verify-project(1) - -## NAME - -cargo-verify-project - Check correctness of crate manifest - -## SYNOPSIS - -`cargo verify-project` [_options_] - -## DESCRIPTION - -This command will parse the local manifest and check its validity. It emits a -JSON object with the result. A successful validation will display: - - {"success":"true"} - -An invalid workspace will display: - - {"invalid":"human-readable error message"} - -## OPTIONS - -### Display Options - -
`-v`, `--verbose`

Use verbose output. May be specified twice for "very verbose" output which
includes extra output such as dependency warnings and build script output.
May also be specified with the `term.verbose` config value.

`-q`, `--quiet`

Do not print cargo log messages. May also be specified with the `term.quiet`
config value.

`--color` _when_

Control when colored output is used. Valid values:

- `auto` (default): Automatically detect if color support is available on the terminal.
- `always`: Always display colors.
- `never`: Never display colors.

May also be specified with the `term.color` config value.

### Manifest Options

`--manifest-path` _path_

Path to the `Cargo.toml` file. By default, Cargo searches for the
`Cargo.toml` file in the current directory or any parent directory.

`--frozen`, `--locked`

Either of these flags requires that the `Cargo.lock` file is up-to-date. If
the lock file is missing, or it needs to be updated, Cargo will exit with an
error. The `--frozen` flag also prevents Cargo from attempting to access the
network to determine if it is out-of-date.

These may be used in environments where you want to assert that the
`Cargo.lock` file is up-to-date (such as a CI build) or want to avoid network
access.

`--offline`

Prevents Cargo from accessing the network for any reason. Without this flag,
Cargo will stop with an error if it needs to access the network and the
network is not available. With this flag, Cargo will attempt to proceed
without the network if possible.

Beware that this may result in different dependency resolution than online
mode. Cargo will restrict itself to crates that are downloaded locally, even
if there might be a newer version as indicated in the local copy of the index.
See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the `net.offline` config value.

### Common Options

`+toolchain`

If Cargo has been installed with rustup, and the first argument to `cargo`
begins with `+`, it will be interpreted as a rustup toolchain name (such as
`+stable` or `+nightly`). See the rustup documentation for more information
about how toolchain overrides work.

`-h`, `--help`

Prints help information.

`-Z` _flag_

Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for details.
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: The workspace is OK. -* `1`: The workspace is invalid. - -## EXAMPLES - -1. Check the current workspace for errors: - - cargo verify-project - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-package(1)](cargo-package.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-version.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-version.md deleted file mode 100644 index 1ceacf811..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-version.md +++ /dev/null @@ -1,42 +0,0 @@ -# cargo-version(1) - -## NAME - -cargo-version - Show version information - -## SYNOPSIS - -`cargo version` [_options_] - -## DESCRIPTION - -Displays the version of Cargo. - -## OPTIONS - -
`-v`, `--verbose`

Display additional version information.
- -## EXAMPLES - -1. Display the version: - - cargo version - -2. The version is also available via flags: - - cargo --version - cargo -V - -3. Display extra version information: - - cargo -Vv - -## SEE ALSO -[cargo(1)](cargo.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-yank.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-yank.md deleted file mode 100644 index 5dbda77d9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo-yank.md +++ /dev/null @@ -1,145 +0,0 @@ -# cargo-yank(1) - -## NAME - -cargo-yank - Remove a pushed crate from the index - -## SYNOPSIS - -`cargo yank` [_options_] `--vers` _version_ [_crate_] - -## DESCRIPTION - -The yank command removes a previously published crate's version from the -server's index. This command does not delete any data, and the crate will -still be available for download via the registry's download link. - -Note that existing crates locked to a yanked version will still be able to -download the yanked version to use it. Cargo will, however, not allow any new -crates to be locked to any yanked version. - -This command requires you to be authenticated with either the `--token` option -or using [cargo-login(1)](cargo-login.html). - -If the crate name is not specified, it will use the package name from the -current directory. - -## OPTIONS - -### Yank Options - -
`--vers` _version_

The version to yank or un-yank.

`--undo`

Undo a yank, putting a version back into the index.

`--token` _token_

API token to use when authenticating. This overrides the token stored in the
credentials file (which is created by cargo-login(1)).

Cargo config environment variables can be used to override the tokens stored
in the credentials file. The token for crates.io may be specified with the
`CARGO_REGISTRY_TOKEN` environment variable. Tokens for other registries may
be specified with environment variables of the form
`CARGO_REGISTRIES_NAME_TOKEN` where `NAME` is the name of the registry in all
capital letters.
`--index` _index_

The URL of the registry index to use.

`--registry` _registry_

Name of the registry to use. Registry names are defined in Cargo config
files. If not specified, the default registry is used, which is defined by
the `registry.default` config key which defaults to `crates-io`.
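For example, tokens can be supplied through the environment instead of the credentials file; the token values, the crate name `foo`, and the `myregistry` name below are placeholders:

```console
# Token for crates.io
$ CARGO_REGISTRY_TOKEN=abc123 cargo yank --vers 1.0.7 foo

# Token for a registry named `myregistry` in Cargo configuration
$ CARGO_REGISTRIES_MYREGISTRY_TOKEN=abc123 cargo yank --registry myregistry --vers 1.0.7 foo
```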
### Display Options

`-v`, `--verbose`

Use verbose output. May be specified twice for "very verbose" output which
includes extra output such as dependency warnings and build script output.
May also be specified with the `term.verbose` config value.

`-q`, `--quiet`

Do not print cargo log messages. May also be specified with the `term.quiet`
config value.

`--color` _when_

Control when colored output is used. Valid values:

- `auto` (default): Automatically detect if color support is available on the terminal.
- `always`: Always display colors.
- `never`: Never display colors.

May also be specified with the `term.color` config value.

### Common Options

`+toolchain`

If Cargo has been installed with rustup, and the first argument to `cargo`
begins with `+`, it will be interpreted as a rustup toolchain name (such as
`+stable` or `+nightly`). See the rustup documentation for more information
about how toolchain overrides work.

`-h`, `--help`

Prints help information.

`-Z` _flag_

Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for details.
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## EXAMPLES - -1. Yank a crate from the index: - - cargo yank --vers 1.0.7 foo - -## SEE ALSO -[cargo(1)](cargo.html), [cargo-login(1)](cargo-login.html), [cargo-publish(1)](cargo-publish.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo.md deleted file mode 100644 index 5a61239e1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/cargo.md +++ /dev/null @@ -1,315 +0,0 @@ -# cargo(1) - -## NAME - -cargo - The Rust package manager - -## SYNOPSIS - -`cargo` [_options_] _command_ [_args_]\ -`cargo` [_options_] `--version`\ -`cargo` [_options_] `--list`\ -`cargo` [_options_] `--help`\ -`cargo` [_options_] `--explain` _code_ - -## DESCRIPTION - -This program is a package manager and build tool for the Rust language, -available at . - -## COMMANDS - -### Build Commands - -[cargo-bench(1)](cargo-bench.html)\ -    Execute benchmarks of a package. - -[cargo-build(1)](cargo-build.html)\ -    Compile a package. - -[cargo-check(1)](cargo-check.html)\ -    Check a local package and all of its dependencies for errors. - -[cargo-clean(1)](cargo-clean.html)\ -    Remove artifacts that Cargo has generated in the past. - -[cargo-doc(1)](cargo-doc.html)\ -    Build a package's documentation. - -[cargo-fetch(1)](cargo-fetch.html)\ -    Fetch dependencies of a package from the network. - -[cargo-fix(1)](cargo-fix.html)\ -    Automatically fix lint warnings reported by rustc. - -[cargo-run(1)](cargo-run.html)\ -    Run a binary or example of the local package. - -[cargo-rustc(1)](cargo-rustc.html)\ -    Compile a package, and pass extra options to the compiler. - -[cargo-rustdoc(1)](cargo-rustdoc.html)\ -    Build a package's documentation, using specified custom flags. - -[cargo-test(1)](cargo-test.html)\ -    Execute unit and integration tests of a package. - -### Manifest Commands - -[cargo-generate-lockfile(1)](cargo-generate-lockfile.html)\ -    Generate `Cargo.lock` for a project. - -[cargo-locate-project(1)](cargo-locate-project.html)\ -    Print a JSON representation of a `Cargo.toml` file's location. - -[cargo-metadata(1)](cargo-metadata.html)\ -    Output the resolved dependencies of a package in machine-readable format. - -[cargo-pkgid(1)](cargo-pkgid.html)\ -    Print a fully qualified package specification. - -[cargo-tree(1)](cargo-tree.html)\ -    Display a tree visualization of a dependency graph. - -[cargo-update(1)](cargo-update.html)\ -    Update dependencies as recorded in the local lock file. - -[cargo-vendor(1)](cargo-vendor.html)\ -    Vendor all dependencies locally. - -[cargo-verify-project(1)](cargo-verify-project.html)\ -    Check correctness of crate manifest. - -### Package Commands - -[cargo-init(1)](cargo-init.html)\ -    Create a new Cargo package in an existing directory. - -[cargo-install(1)](cargo-install.html)\ -    Build and install a Rust binary. - -[cargo-new(1)](cargo-new.html)\ -    Create a new Cargo package. - -[cargo-search(1)](cargo-search.html)\ -    Search packages in crates.io. - -[cargo-uninstall(1)](cargo-uninstall.html)\ -    Remove a Rust binary. - -### Publishing Commands - -[cargo-login(1)](cargo-login.html)\ -    Save an API token from the registry locally. 
- -[cargo-owner(1)](cargo-owner.html)\ -    Manage the owners of a crate on the registry. - -[cargo-package(1)](cargo-package.html)\ -    Assemble the local package into a distributable tarball. - -[cargo-publish(1)](cargo-publish.html)\ -    Upload a package to the registry. - -[cargo-yank(1)](cargo-yank.html)\ -    Remove a pushed crate from the index. - -### General Commands - -[cargo-help(1)](cargo-help.html)\ -    Display help information about Cargo. - -[cargo-version(1)](cargo-version.html)\ -    Show version information. - -## OPTIONS - -### Special Options - -
`-V`, `--version`

Print version info and exit. If used with `--verbose`, prints extra
information.

`--list`

List all installed Cargo subcommands. If used with `--verbose`, prints extra
information.

`--explain` _code_

Run `rustc --explain CODE` which will print out a detailed explanation of an
error message (for example, `E0004`).
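For instance, these options can be exercised as follows; the comments only indicate the general shape of the output:

```console
# Verbose version information (release number plus build details)
$ cargo -Vv

# List every built-in and installed subcommand
$ cargo --list

# Ask rustc for the long-form explanation of error E0004
$ cargo --explain E0004
```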
### Display Options

`-v`, `--verbose`

Use verbose output. May be specified twice for "very verbose" output which
includes extra output such as dependency warnings and build script output.
May also be specified with the `term.verbose` config value.

`-q`, `--quiet`

Do not print cargo log messages. May also be specified with the `term.quiet`
config value.

`--color` _when_

Control when colored output is used. Valid values:

- `auto` (default): Automatically detect if color support is available on the terminal.
- `always`: Always display colors.
- `never`: Never display colors.

May also be specified with the `term.color` config value.

### Manifest Options

`--frozen`, `--locked`

Either of these flags requires that the `Cargo.lock` file is up-to-date. If
the lock file is missing, or it needs to be updated, Cargo will exit with an
error. The `--frozen` flag also prevents Cargo from attempting to access the
network to determine if it is out-of-date.

These may be used in environments where you want to assert that the
`Cargo.lock` file is up-to-date (such as a CI build) or want to avoid network
access.

`--offline`

Prevents Cargo from accessing the network for any reason. Without this flag,
Cargo will stop with an error if it needs to access the network and the
network is not available. With this flag, Cargo will attempt to proceed
without the network if possible.

Beware that this may result in different dependency resolution than online
mode. Cargo will restrict itself to crates that are downloaded locally, even
if there might be a newer version as indicated in the local copy of the index.
See the cargo-fetch(1) command to download dependencies before going offline.

May also be specified with the `net.offline` config value.

### Common Options

`+toolchain`

If Cargo has been installed with rustup, and the first argument to `cargo`
begins with `+`, it will be interpreted as a rustup toolchain name (such as
`+stable` or `+nightly`). See the rustup documentation for more information
about how toolchain overrides work.

`-h`, `--help`

Prints help information.

`-Z` _flag_

Unstable (nightly-only) flags to Cargo. Run `cargo -Z help` for details.
- - -## ENVIRONMENT - -See [the reference](../reference/environment-variables.html) for -details on environment variables that Cargo reads. - - -## EXIT STATUS - -* `0`: Cargo succeeded. -* `101`: Cargo failed to complete. - - -## FILES - -`~/.cargo/`\ -    Default location for Cargo's "home" directory where it -stores various files. The location can be changed with the `CARGO_HOME` -environment variable. - -`$CARGO_HOME/bin/`\ -    Binaries installed by [cargo-install(1)](cargo-install.html) will be located here. If using -[rustup], executables distributed with Rust are also located here. - -`$CARGO_HOME/config.toml`\ -    The global configuration file. See [the reference](../reference/config.html) -for more information about configuration files. - -`.cargo/config.toml`\ -    Cargo automatically searches for a file named `.cargo/config.toml` in the -current directory, and all parent directories. These configuration files -will be merged with the global configuration file. - -`$CARGO_HOME/credentials.toml`\ -    Private authentication information for logging in to a registry. - -`$CARGO_HOME/registry/`\ -    This directory contains cached downloads of the registry index and any -downloaded dependencies. - -`$CARGO_HOME/git/`\ -    This directory contains cached downloads of git dependencies. - -Please note that the internal structure of the `$CARGO_HOME` directory is not -stable yet and may be subject to change. - -[rustup]: https://rust-lang.github.io/rustup/ - -## EXAMPLES - -1. Build a local package and all of its dependencies: - - cargo build - -2. Build a package with optimizations: - - cargo build --release - -3. Run tests for a cross-compiled target: - - cargo test --target i686-unknown-linux-gnu - -4. Create a new package that builds an executable: - - cargo new foobar - -5. Create a package in the current directory: - - mkdir foo && cd foo - cargo init . - -6. Learn about a command's options and usage: - - cargo help clean - -## BUGS - -See for issues. 
- -## SEE ALSO -[rustc(1)](https://doc.rust-lang.org/rustc/index.html), [rustdoc(1)](https://doc.rust-lang.org/rustdoc/index.html) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/general-commands.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/general-commands.md deleted file mode 100644 index eaad4fbc3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/general-commands.md +++ /dev/null @@ -1,4 +0,0 @@ -# General Commands -* [cargo](cargo.md) -* [cargo help](cargo-help.md) -* [cargo version](cargo-version.md) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/index.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/index.md deleted file mode 100644 index 362a53ec0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/index.md +++ /dev/null @@ -1,6 +0,0 @@ -# Cargo Commands -* [General Commands](general-commands.md) -* [Build Commands](build-commands.md) -* [Manifest Commands](manifest-commands.md) -* [Package Commands](package-commands.md) -* [Publishing Commands](publishing-commands.md) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/manifest-commands.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/manifest-commands.md deleted file mode 100644 index e9b5c708d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/manifest-commands.md +++ /dev/null @@ -1,9 +0,0 @@ -# Manifest Commands -* [cargo generate-lockfile](cargo-generate-lockfile.md) -* [cargo locate-project](cargo-locate-project.md) -* [cargo metadata](cargo-metadata.md) -* [cargo pkgid](cargo-pkgid.md) -* [cargo tree](cargo-tree.md) -* [cargo update](cargo-update.md) -* [cargo vendor](cargo-vendor.md) -* [cargo verify-project](cargo-verify-project.md) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/package-commands.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/package-commands.md deleted file mode 100644 index 783abaff8..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/package-commands.md +++ /dev/null @@ -1,6 +0,0 @@ -# Package Commands -* [cargo init](cargo-init.md) -* [cargo install](cargo-install.md) -* [cargo new](cargo-new.md) -* [cargo search](cargo-search.md) -* [cargo uninstall](cargo-uninstall.md) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/publishing-commands.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/publishing-commands.md deleted file mode 100644 index 372faada2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/commands/publishing-commands.md +++ /dev/null @@ -1,6 +0,0 @@ -# Publishing Commands -* [cargo login](cargo-login.md) -* [cargo owner](cargo-owner.md) -* [cargo package](cargo-package.md) -* [cargo publish](cargo-publish.md) -* [cargo yank](cargo-yank.md) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/faq.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/faq.md deleted file mode 100644 index a993deb18..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/faq.md +++ /dev/null @@ -1,260 +0,0 @@ -## Frequently Asked Questions - -### Is the plan to use GitHub as a package repository? - -No. The plan for Cargo is to use [crates.io], like npm or Rubygems do with -npmjs.org and rubygems.org. 
- -We plan to support git repositories as a source of packages forever, -because they can be used for early development and temporary patches, -even when people use the registry as the primary source of packages. - -### Why build crates.io rather than use GitHub as a registry? - -We think that itโ€™s very important to support multiple ways to download -packages, including downloading from GitHub and copying packages into -your package itself. - -That said, we think that [crates.io] offers a number of important benefits, and -will likely become the primary way that people download packages in Cargo. - -For precedent, both Node.jsโ€™s [npm][1] and Rubyโ€™s [bundler][2] support both a -central registry model as well as a Git-based model, and most packages -are downloaded through the registry in those ecosystems, with an -important minority of packages making use of git-based packages. - -[1]: https://www.npmjs.org -[2]: https://bundler.io - -Some of the advantages that make a central registry popular in other -languages include: - -* **Discoverability**. A central registry provides an easy place to look - for existing packages. Combined with tagging, this also makes it - possible for a registry to provide ecosystem-wide information, such as a - list of the most popular or most-depended-on packages. -* **Speed**. A central registry makes it possible to easily fetch just - the metadata for packages quickly and efficiently, and then to - efficiently download just the published package, and not other bloat - that happens to exist in the repository. This adds up to a significant - improvement in the speed of dependency resolution and fetching. As - dependency graphs scale up, downloading all of the git repositories bogs - down fast. Also remember that not everybody has a high-speed, - low-latency Internet connection. - -### Will Cargo work with C code (or other languages)? - -Yes! - -Cargo handles compiling Rust code, but we know that many Rust packages -link against C code. We also know that there are decades of tooling -built up around compiling languages other than Rust. - -Our solution: Cargo allows a package to [specify a script](reference/build-scripts.md) -(written in Rust) to run before invoking `rustc`. Rust is leveraged to -implement platform-specific configuration and refactor out common build -functionality among packages. - -### Can Cargo be used inside of `make` (or `ninja`, or ...) - -Indeed. While we intend Cargo to be useful as a standalone way to -compile Rust packages at the top-level, we know that some people will -want to invoke Cargo from other build tools. - -We have designed Cargo to work well in those contexts, paying attention -to things like error codes and machine-readable output modes. We still -have some work to do on those fronts, but using Cargo in the context of -conventional scripts is something we designed for from the beginning and -will continue to prioritize. - -### Does Cargo handle multi-platform packages or cross-compilation? - -Rust itself provides facilities for configuring sections of code based -on the platform. Cargo also supports [platform-specific -dependencies][target-deps], and we plan to support more per-platform -configuration in `Cargo.toml` in the future. - -[target-deps]: reference/specifying-dependencies.md#platform-specific-dependencies - -In the longer-term, weโ€™re looking at ways to conveniently cross-compile -packages using Cargo. - -### Does Cargo support environments, like `production` or `test`? 
- -We support environments through the use of [profiles] to support: - -[profiles]: reference/profiles.md - -* environment-specific flags (like `-g --opt-level=0` for development - and `--opt-level=3` for production). -* environment-specific dependencies (like `hamcrest` for test assertions). -* environment-specific `#[cfg]` -* a `cargo test` command - -### Does Cargo work on Windows? - -Yes! - -All commits to Cargo are required to pass the local test suite on Windows. -If, however, you find a Windows issue, we consider it a bug, so [please file an -issue][3]. - -[3]: https://github.com/rust-lang/cargo/issues - -### Why do binaries have `Cargo.lock` in version control, but not libraries? - -The purpose of a `Cargo.lock` lockfile is to describe the state of the world at -the time of a successful build. Cargo uses the lockfile to provide -deterministic builds on different times and different systems, by ensuring that -the exact same dependencies and versions are used as when the `Cargo.lock` file -was originally generated. - -This property is most desirable from applications and packages which are at the -very end of the dependency chain (binaries). As a result, it is recommended that -all binaries check in their `Cargo.lock`. - -For libraries the situation is somewhat different. A library is not only used by -the library developers, but also any downstream consumers of the library. Users -dependent on the library will not inspect the libraryโ€™s `Cargo.lock` (even if it -exists). This is precisely because a library should **not** be deterministically -recompiled for all users of the library. - -If a library ends up being used transitively by several dependencies, itโ€™s -likely that just a single copy of the library is desired (based on semver -compatibility). If Cargo used all of the dependencies' `Cargo.lock` files, -then multiple copies of the library could be used, and perhaps even a version -conflict. - -In other words, libraries specify SemVer requirements for their dependencies but -cannot see the full picture. Only end products like binaries have a full -picture to decide what versions of dependencies should be used. - -### Can libraries use `*` as a version for their dependencies? - -**As of January 22nd, 2016, [crates.io] rejects all packages (not just libraries) -with wildcard dependency constraints.** - -While libraries _can_, strictly speaking, they should not. A version requirement -of `*` says โ€œThis will work with every version ever,โ€ which is never going -to be true. Libraries should always specify the range that they do work with, -even if itโ€™s something as general as โ€œevery 1.x.y version.โ€ - -### Why `Cargo.toml`? - -As one of the most frequent interactions with Cargo, the question of why the -configuration file is named `Cargo.toml` arises from time to time. The leading -capital-`C` was chosen to ensure that the manifest was grouped with other -similar configuration files in directory listings. Sorting files often puts -capital letters before lowercase letters, ensuring files like `Makefile` and -`Cargo.toml` are placed together. The trailing `.toml` was chosen to emphasize -the fact that the file is in the [TOML configuration -format](https://toml.io/). - -Cargo does not allow other names such as `cargo.toml` or `Cargofile` to -emphasize the ease of how a Cargo repository can be identified. An option of -many possible names has historically led to confusion where one case was handled -but others were accidentally forgotten. 
- -[crates.io]: https://crates.io/ - -### How can Cargo work offline? - -Cargo is often used in situations with limited or no network access such as -airplanes, CI environments, or embedded in large production deployments. Users -are often surprised when Cargo attempts to fetch resources from the network, and -hence the request for Cargo to work offline comes up frequently. - -Cargo, at its heart, will not attempt to access the network unless told to do -so. That is, if no crates come from crates.io, a git repository, or some other -network location, Cargo will never attempt to make a network connection. As a -result, if Cargo attempts to touch the network, then it's because it needs to -fetch a required resource. - -Cargo is also quite aggressive about caching information to minimize the amount -of network activity. It will guarantee, for example, that if `cargo build` (or -an equivalent) is run to completion then the next `cargo build` is guaranteed to -not touch the network so long as `Cargo.toml` has not been modified in the -meantime. This avoidance of the network boils down to a `Cargo.lock` existing -and a populated cache of the crates reflected in the lock file. If either of -these components are missing, then they're required for the build to succeed and -must be fetched remotely. - -As of Rust 1.11.0, Cargo understands a new flag, `--frozen`, which is an -assertion that it shouldn't touch the network. When passed, Cargo will -immediately return an error if it would otherwise attempt a network request. -The error should include contextual information about why the network request is -being made in the first place to help debug as well. Note that this flag *does -not change the behavior of Cargo*, it simply asserts that Cargo shouldn't touch -the network as a previous command has been run to ensure that network activity -shouldn't be necessary. - -The `--offline` flag was added in Rust 1.36.0. This flag tells Cargo to not -access the network, and try to proceed with available cached data if possible. -You can use [`cargo fetch`] in one project to download dependencies before -going offline, and then use those same dependencies in another project with -the `--offline` flag (or [configuration value][offline config]). - -For more information about vendoring, see documentation on [source -replacement][replace]. - -[replace]: reference/source-replacement.md -[`cargo fetch`]: commands/cargo-fetch.md -[offline config]: reference/config.md#netoffline - -### Why is Cargo rebuilding my code? - -Cargo is responsible for incrementally compiling crates in your project. This -means that if you type `cargo build` twice the second one shouldn't rebuild your -crates.io dependencies, for example. Nevertheless bugs arise and Cargo can -sometimes rebuild code when you're not expecting it! - -We've long [wanted to provide better diagnostics about -this](https://github.com/rust-lang/cargo/issues/2904) but unfortunately haven't -been able to make progress on that issue in quite some time. In the meantime, -however, you can debug a rebuild at least a little by setting the `CARGO_LOG` -environment variable: - -```sh -$ CARGO_LOG=cargo::core::compiler::fingerprint=info cargo build -``` - -This will cause Cargo to print out a lot of information about diagnostics and -rebuilding. This can often contain clues as to why your project is getting -rebuilt, although you'll often need to connect some dots yourself since this -output isn't super easy to read just yet. 
Note that the `CARGO_LOG` needs to be -set for the command that rebuilds when you think it should not. Unfortunately -Cargo has no way right now of after-the-fact debugging "why was that rebuilt?" - -Some issues we've seen historically which can cause crates to get rebuilt are: - -* A build script prints `cargo:rerun-if-changed=foo` where `foo` is a file that - doesn't exist and nothing generates it. In this case Cargo will keep running - the build script thinking it will generate the file but nothing ever does. The - fix is to avoid printing `rerun-if-changed` in this scenario. - -* Two successive Cargo builds may differ in the set of features enabled for some - dependencies. For example if the first build command builds the whole - workspace and the second command builds only one crate, this may cause a - dependency on crates.io to have a different set of features enabled, causing - it and everything that depends on it to get rebuilt. There's unfortunately not - really a great fix for this, although if possible it's best to have the set of - features enabled on a crate constant regardless of what you're building in - your workspace. - -* Some filesystems exhibit unusual behavior around timestamps. Cargo primarily - uses timestamps on files to govern whether rebuilding needs to happen, but if - you're using a nonstandard filesystem it may be affecting the timestamps - somehow (e.g. truncating them, causing them to drift, etc). In this scenario, - feel free to open an issue and we can see if we can accommodate the filesystem - somehow. - -* A concurrent build process is either deleting artifacts or modifying files. - Sometimes you might have a background process that either tries to build or - check your project. These background processes might surprisingly delete some - build artifacts or touch files (or maybe just by accident), which can cause - rebuilds to look spurious! The best fix here would be to wrangle the - background process to avoid clashing with your work. - -If after trying to debug your issue, however, you're still running into problems -then feel free to [open an -issue](https://github.com/rust-lang/cargo/issues/new)! diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/getting-started/first-steps.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/getting-started/first-steps.md deleted file mode 100644 index 15bb4bdc7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/getting-started/first-steps.md +++ /dev/null @@ -1,82 +0,0 @@ -## First Steps with Cargo - -This section provides a quick sense for the `cargo` command line tool. We -demonstrate its ability to generate a new [***package***][def-package] for us, -its ability to compile the [***crate***][def-crate] within the package, and -its ability to run the resulting program. - -To start a new package with Cargo, use `cargo new`: - -```console -$ cargo new hello_world -``` - -Cargo defaults to `--bin` to make a binary program. To make a library, we -would pass `--lib`, instead. - -Letโ€™s check out what Cargo has generated for us: - -```console -$ cd hello_world -$ tree . -. -โ”œโ”€โ”€ Cargo.toml -โ””โ”€โ”€ src - โ””โ”€โ”€ main.rs - -1 directory, 2 files -``` - -This is all we need to get started. First, letโ€™s check out `Cargo.toml`: - -```toml -[package] -name = "hello_world" -version = "0.1.0" -edition = "2021" - -[dependencies] -``` - -This is called a [***manifest***][def-manifest], and it contains all of the -metadata that Cargo needs to compile your package. 
- -Hereโ€™s whatโ€™s in `src/main.rs`: - -```rust -fn main() { - println!("Hello, world!"); -} -``` - -Cargo generated a โ€œhello worldโ€ program for us, otherwise known as a -[***binary crate***][def-crate]. Letโ€™s compile it: - -```console -$ cargo build - Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) -``` - -And then run it: - -```console -$ ./target/debug/hello_world -Hello, world! -``` - -We can also use `cargo run` to compile and then run it, all in one step: - -```console -$ cargo run - Fresh hello_world v0.1.0 (file:///path/to/package/hello_world) - Running `target/hello_world` -Hello, world! -``` - -### Going further - -For more details on using Cargo, check out the [Cargo Guide](../guide/index.md) - -[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' -[def-manifest]: ../appendix/glossary.md#manifest '"manifest" (glossary entry)' -[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/getting-started/index.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/getting-started/index.md deleted file mode 100644 index 710e9943b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/getting-started/index.md +++ /dev/null @@ -1,9 +0,0 @@ -## Getting Started - -To get started with Cargo, install Cargo (and Rust) and set up your first -[*crate*][def-crate]. - -* [Installation](installation.md) -* [First steps with Cargo](first-steps.md) - -[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/getting-started/installation.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/getting-started/installation.md deleted file mode 100644 index 4c6669160..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/getting-started/installation.md +++ /dev/null @@ -1,37 +0,0 @@ -## Installation - -### Install Rust and Cargo - -The easiest way to get Cargo is to install the current stable release of [Rust] -by using `rustup`. Installing Rust using `rustup` will also install `cargo`. - -On Linux and macOS systems, this is done as follows: - -```console -curl https://sh.rustup.rs -sSf | sh -``` - -It will download a script, and start the installation. If everything goes well, -youโ€™ll see this appear: - -```console -Rust is installed now. Great! -``` - -On Windows, download and run [rustup-init.exe]. It will start the installation -in a console and present the above message on success. - -After this, you can use the `rustup` command to also install `beta` or `nightly` -channels for Rust and Cargo. - -For other installation options and information, visit the -[install][install-rust] page of the Rust website. - -### Build and Install Cargo from Source - -Alternatively, you can [build Cargo from source][compiling-from-source]. - -[rust]: https://www.rust-lang.org/ -[rustup-init.exe]: https://win.rustup.rs/ -[install-rust]: https://www.rust-lang.org/tools/install -[compiling-from-source]: https://github.com/rust-lang/cargo#compiling-from-source diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/build-cache.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/build-cache.md deleted file mode 100644 index a8453f615..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/build-cache.md +++ /dev/null @@ -1,107 +0,0 @@ -## Build cache - -Cargo stores the output of a build into the "target" directory. 
By default, -this is the directory named `target` in the root of your -[*workspace*][def-workspace]. To change the location, you can set the -`CARGO_TARGET_DIR` [environment variable], the [`build.target-dir`] config -value, or the `--target-dir` command-line flag. - -The directory layout depends on whether or not you are using the `--target` -flag to build for a specific platform. If `--target` is not specified, Cargo -runs in a mode where it builds for the host architecture. The output goes into -the root of the target directory, with each [profile] stored in a separate -subdirectory: - -Directory | Description -----------|------------ -target/debug/ | Contains output for the `dev` profile. -target/release/ | Contains output for the `release` profile (with the `--release` option). -target/foo/ | Contains build output for the `foo` profile (with the `--profile=foo` option). - -For historical reasons, the `dev` and `test` profiles are stored in the -`debug` directory, and the `release` and `bench` profiles are stored in the -`release` directory. User-defined profiles are stored in a directory with the -same name as the profile. - -When building for another target with `--target`, the output is placed in a -directory with the name of the target: - -Directory | Example -----------|-------- -target/<triple>/debug/ | target/thumbv7em-none-eabihf/debug/ -target/<triple>/release/ | target/thumbv7em-none-eabihf/release/ - -> **Note**: When not using `--target`, this has a consequence that Cargo will -> share your dependencies with build scripts and proc macros. [`RUSTFLAGS`] -> will be shared with every `rustc` invocation. With the `--target` flag, -> build scripts and proc macros are built separately (for the host -> architecture), and do not share `RUSTFLAGS`. - -Within the profile directory (such as `debug` or `release`), artifacts are -placed into the following directories: - -Directory | Description -----------|------------ -target/debug/ | Contains the output of the package being built (the [binary executables] and [library targets]). -target/debug/examples/ | Contains [example targets]. - -Some commands place their output in dedicated directories in the top level of -the `target` directory: - -Directory | Description -----------|------------ -target/doc/ | Contains rustdoc documentation ([`cargo doc`]). -target/package/ | Contains the output of the [`cargo package`] and [`cargo publish`] commands. - -Cargo also creates several other directories and files needed for the build -process. Their layout is considered internal to Cargo, and is subject to -change. Some of these directories are: - -Directory | Description -----------|------------ -target/debug/deps/ | Dependencies and other artifacts. -target/debug/incremental/ | `rustc` [incremental output], a cache used to speed up subsequent builds. -target/debug/build/ | Output from [build scripts]. - -### Dep-info files - -Next to each compiled artifact is a file called a "dep info" file with a `.d` -suffix. This file is a Makefile-like syntax that indicates all of the file -dependencies required to rebuild the artifact. These are intended to be used -with external build systems so that they can detect if Cargo needs to be -re-executed. The paths in the file are absolute by default. See the -[`build.dep-info-basedir`] config option to use relative paths. 
- -```Makefile -# Example dep-info file found in target/debug/foo.d -/path/to/myproj/target/debug/foo: /path/to/myproj/src/lib.rs /path/to/myproj/src/main.rs -``` - -### Shared cache - -A third party tool, [sccache], can be used to share built dependencies across -different workspaces. - -To setup `sccache`, install it with `cargo install sccache` and set -`RUSTC_WRAPPER` environmental variable to `sccache` before invoking Cargo. If -you use bash, it makes sense to add `export RUSTC_WRAPPER=sccache` to -`.bashrc`. Alternatively, you can set [`build.rustc-wrapper`] in the [Cargo -configuration][config]. Refer to sccache documentation for more details. - -[`RUSTFLAGS`]: ../reference/config.md#buildrustflags -[`build.dep-info-basedir`]: ../reference/config.md#builddep-info-basedir -[`build.rustc-wrapper`]: ../reference/config.md#buildrustc-wrapper -[`build.target-dir`]: ../reference/config.md#buildtarget-dir -[`cargo doc`]: ../commands/cargo-doc.md -[`cargo package`]: ../commands/cargo-package.md -[`cargo publish`]: ../commands/cargo-publish.md -[build scripts]: ../reference/build-scripts.md -[config]: ../reference/config.md -[def-workspace]: ../appendix/glossary.md#workspace '"workspace" (glossary entry)' -[environment variable]: ../reference/environment-variables.md -[incremental output]: ../reference/profiles.md#incremental -[sccache]: https://github.com/mozilla/sccache -[profile]: ../reference/profiles.md -[binary executables]: ../reference/cargo-targets.md#binaries -[library targets]: ../reference/cargo-targets.md#library -[example targets]: ../reference/cargo-targets.md#examples diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/cargo-home.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/cargo-home.md deleted file mode 100644 index b93c8559b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/cargo-home.md +++ /dev/null @@ -1,89 +0,0 @@ -## Cargo Home - -The "Cargo home" functions as a download and source cache. -When building a [crate][def-crate], Cargo stores downloaded build dependencies in the Cargo home. -You can alter the location of the Cargo home by setting the `CARGO_HOME` [environmental variable][env]. -The [home](https://crates.io/crates/home) crate provides an API for getting this location if you need this information inside your Rust crate. -By default, the Cargo home is located in `$HOME/.cargo/`. - -Please note that the internal structure of the Cargo home is not stabilized and may be subject to change at any time. - -The Cargo home consists of following components: - -## Files: - -* `config.toml` - Cargo's global configuration file, see the [config entry in the reference][config]. - -* `credentials.toml` - Private login credentials from [`cargo login`] in order to log in to a [registry][def-registry]. - -* `.crates.toml`, `.crates2.json` - These hidden files contain [package][def-package] information of crates installed via [`cargo install`]. Do NOT edit by hand! - -## Directories: - -* `bin` -The bin directory contains executables of crates that were installed via [`cargo install`] or [`rustup`](https://rust-lang.github.io/rustup/). -To be able to make these binaries accessible, add the path of the directory to your `$PATH` environment variable. - - * `git` - Git sources are stored here: - - * `git/db` - When a crate depends on a git repository, Cargo clones the repo as a bare repo into this directory and updates it if necessary. 
- - * `git/checkouts` - If a git source is used, the required commit of the repo is checked out from the bare repo inside `git/db` into this directory. - This provides the compiler with the actual files contained in the repo of the commit specified for that dependency. - Multiple checkouts of different commits of the same repo are possible. - -* `registry` - Packages and metadata of crate registries (such as [crates.io](https://crates.io/)) are located here. - - * `registry/index` - The index is a bare git repository which contains the metadata (versions, dependencies etc) of all available crates of a registry. - - * `registry/cache` - Downloaded dependencies are stored in the cache. The crates are compressed gzip archives named with a `.crate` extension. - - * `registry/src` - If a downloaded `.crate` archive is required by a package, it is unpacked into `registry/src` folder where rustc will find the `.rs` files. - - -## Caching the Cargo home in CI - -To avoid redownloading all crate dependencies during continuous integration, you can cache the `$CARGO_HOME` directory. -However, caching the entire directory is often inefficient as it will contain downloaded sources twice. -If we depend on a crate such as `serde 1.0.92` and cache the entire `$CARGO_HOME` we would actually cache the sources twice, the `serde-1.0.92.crate` inside `registry/cache` and the extracted `.rs` files of serde inside `registry/src`. -That can unnecessarily slow down the build as downloading, extracting, recompressing and reuploading the cache to the CI servers can take some time. - -It should be sufficient to only cache the following directories across builds: - -* `bin/` -* `registry/index/` -* `registry/cache/` -* `git/db/` - - - -## Vendoring all dependencies of a project - -See the [`cargo vendor`] subcommand. - - - -## Clearing the cache - -In theory, you can always remove any part of the cache and Cargo will do its best to restore sources if a crate needs them either by reextracting an archive or checking out a bare repo or by simply redownloading the sources from the web. - -Alternatively, the [cargo-cache](https://crates.io/crates/cargo-cache) crate provides a simple CLI tool to only clear selected parts of the cache or show sizes of its components in your command-line. - -[`cargo install`]: ../commands/cargo-install.md -[`cargo login`]: ../commands/cargo-login.md -[`cargo vendor`]: ../commands/cargo-vendor.md -[config]: ../reference/config.md -[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' -[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' -[def-registry]: ../appendix/glossary.md#registry '"registry" (glossary entry)' -[env]: ../reference/environment-variables.md diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md deleted file mode 100644 index 80e3aa8d1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/cargo-toml-vs-cargo-lock.md +++ /dev/null @@ -1,106 +0,0 @@ -## Cargo.toml vs Cargo.lock - -`Cargo.toml` and `Cargo.lock` serve two different purposes. Before we talk -about them, hereโ€™s a summary: - -* `Cargo.toml` is about describing your dependencies in a broad sense, and is - written by you. -* `Cargo.lock` contains exact information about your dependencies. It is - maintained by Cargo and should not be manually edited. 
- -If youโ€™re building a non-end product, such as a rust library that other rust -[packages][def-package] will depend on, put `Cargo.lock` in your -`.gitignore`. If youโ€™re building an end product, which are executable like -command-line tool or an application, or a system library with crate-type of -`staticlib` or `cdylib`, check `Cargo.lock` into `git`. If you're curious -about why that is, see -["Why do binaries have `Cargo.lock` in version control, but not libraries?" in the -FAQ](../faq.md#why-do-binaries-have-cargolock-in-version-control-but-not-libraries). - -Letโ€™s dig in a little bit more. - -`Cargo.toml` is a [**manifest**][def-manifest] file in which we can specify a -bunch of different metadata about our package. For example, we can say that we -depend on another package: - -```toml -[package] -name = "hello_world" -version = "0.1.0" - -[dependencies] -regex = { git = "https://github.com/rust-lang/regex.git" } -``` - -This package has a single dependency, on the `regex` library. Weโ€™ve stated in -this case that weโ€™re relying on a particular Git repository that lives on -GitHub. Since we havenโ€™t specified any other information, Cargo assumes that -we intend to use the latest commit on the `master` branch to build our package. - -Sound good? Well, thereโ€™s one problem: If you build this package today, and -then you send a copy to me, and I build this package tomorrow, something bad -could happen. There could be more commits to `regex` in the meantime, and my -build would include new commits while yours would not. Therefore, we would -get different builds. This would be bad because we want reproducible builds. - -We could fix this problem by putting a `rev` line in our `Cargo.toml`: - -```toml -[dependencies] -regex = { git = "https://github.com/rust-lang/regex.git", rev = "9f9f693" } -``` - -Now our builds will be the same. But thereโ€™s a big drawback: now we have to -manually think about SHA-1s every time we want to update our library. This is -both tedious and error prone. - -Enter the `Cargo.lock`. Because of its existence, we donโ€™t need to manually -keep track of the exact revisions: Cargo will do it for us. When we have a -manifest like this: - -```toml -[package] -name = "hello_world" -version = "0.1.0" - -[dependencies] -regex = { git = "https://github.com/rust-lang/regex.git" } -``` - -Cargo will take the latest commit and write that information out into our -`Cargo.lock` when we build for the first time. That file will look like this: - -```toml -[[package]] -name = "hello_world" -version = "0.1.0" -dependencies = [ - "regex 1.5.0 (git+https://github.com/rust-lang/regex.git#9f9f693768c584971a4d53bc3c586c33ed3a6831)", -] - -[[package]] -name = "regex" -version = "1.5.0" -source = "git+https://github.com/rust-lang/regex.git#9f9f693768c584971a4d53bc3c586c33ed3a6831" -``` - -You can see that thereโ€™s a lot more information here, including the exact -revision we used to build. Now when you give your package to someone else, -theyโ€™ll use the exact same SHA, even though we didnโ€™t specify it in our -`Cargo.toml`. - -When weโ€™re ready to opt in to a new version of the library, Cargo can -re-calculate the dependencies and update things for us: - -```console -$ cargo update # updates all dependencies -$ cargo update -p regex # updates just โ€œregexโ€ -``` - -This will write out a new `Cargo.lock` with the new version information. 
Note -that the argument to `cargo update` is actually a -[Package ID Specification](../reference/pkgid-spec.md) and `regex` is just a -short specification. - -[def-manifest]: ../appendix/glossary.md#manifest '"manifest" (glossary entry)' -[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/continuous-integration.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/continuous-integration.md deleted file mode 100644 index ccaf1ba8e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/continuous-integration.md +++ /dev/null @@ -1,125 +0,0 @@ -## Continuous Integration - -### Travis CI - -To test your [package][def-package] on Travis CI, here is a sample -`.travis.yml` file: - -```yaml -language: rust -rust: - - stable - - beta - - nightly -matrix: - allow_failures: - - rust: nightly -``` - -This will test all three release channels, but any breakage in nightly -will not fail your overall build. Please see the [Travis CI Rust -documentation](https://docs.travis-ci.com/user/languages/rust/) for more -information. - -### GitHub Actions - -To test your package on GitHub Actions, here is a sample `.github/workflows/ci.yml` file: - -```yaml -name: Cargo Build & Test - -on: - push: - pull_request: - -env: - CARGO_TERM_COLOR: always - -jobs: - build_and_test: - name: Rust project - latest - runs-on: ubuntu-latest - strategy: - matrix: - toolchain: - - stable - - beta - - nightly - steps: - - uses: actions/checkout@v2 - - run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }} - - run: cargo build --verbose - - run: cargo test --verbose - -``` - -This will test all three release channels (note a failure in any toolchain version will fail the entire job). You can also click `"Actions" > "new workflow"` in the GitHub UI and select Rust to add the [default configuration](https://github.com/actions/starter-workflows/blob/main/ci/rust.yml) to your repo. See [GitHub Actions documentation](https://docs.github.com/en/actions) for more information. - -### GitLab CI - -To test your package on GitLab CI, here is a sample `.gitlab-ci.yml` file: - -```yaml -stages: - - build - -rust-latest: - stage: build - image: rust:latest - script: - - cargo build --verbose - - cargo test --verbose - -rust-nightly: - stage: build - image: rustlang/rust:nightly - script: - - cargo build --verbose - - cargo test --verbose - allow_failure: true -``` - -This will test on the stable channel and nightly channel, but any -breakage in nightly will not fail your overall build. Please see the -[GitLab CI documentation](https://docs.gitlab.com/ce/ci/yaml/index.html) for more -information. - -### builds.sr.ht - -To test your package on sr.ht, here is a sample `.build.yml` file. -Be sure to change `` and `` to the repo to clone and -the directory where it was cloned. - -```yaml -image: archlinux -packages: - - rustup -sources: - - -tasks: - - setup: | - rustup toolchain install nightly stable - cd / - rustup run stable cargo fetch - - stable: | - rustup default stable - cd / - cargo build --verbose - cargo test --verbose - - nightly: | - rustup default nightly - cd / - cargo build --verbose ||: - cargo test --verbose ||: - - docs: | - cd / - rustup run stable cargo doc --no-deps - rustup run nightly cargo doc --no-deps ||: -``` - -This will test and build documentation on the stable channel and nightly -channel, but any breakage in nightly will not fail your overall build. 
Please -see the [builds.sr.ht documentation](https://man.sr.ht/builds.sr.ht/) for more -information. - -[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/creating-a-new-project.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/creating-a-new-project.md deleted file mode 100644 index e0daefc6b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/creating-a-new-project.md +++ /dev/null @@ -1,97 +0,0 @@ -## Creating a New Package - -To start a new [package][def-package] with Cargo, use `cargo new`: - -```console -$ cargo new hello_world --bin -``` - -Weโ€™re passing `--bin` because weโ€™re making a binary program: if we -were making a library, weโ€™d pass `--lib`. This also initializes a new `git` -repository by default. If you don't want it to do that, pass `--vcs none`. - -Letโ€™s check out what Cargo has generated for us: - -```console -$ cd hello_world -$ tree . -. -โ”œโ”€โ”€ Cargo.toml -โ””โ”€โ”€ src - โ””โ”€โ”€ main.rs - -1 directory, 2 files -``` - -Letโ€™s take a closer look at `Cargo.toml`: - -```toml -[package] -name = "hello_world" -version = "0.1.0" -edition = "2021" - -[dependencies] - -``` - -This is called a [***manifest***][def-manifest], and it contains all of the -metadata that Cargo needs to compile your package. This file is written in the -[TOML] format (pronounced /tษ‘mษ™l/). - -Hereโ€™s whatโ€™s in `src/main.rs`: - -```rust -fn main() { - println!("Hello, world!"); -} -``` - -Cargo generated a โ€œhello worldโ€ program for us, otherwise known as a -[*binary crate*][def-crate]. Letโ€™s compile it: - -```console -$ cargo build - Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) -``` - -And then run it: - -```console -$ ./target/debug/hello_world -Hello, world! -``` - -We can also use `cargo run` to compile and then run it, all in one step (You -won't see the `Compiling` line if you have not made any changes since you last -compiled): - -```console -$ cargo run - Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) - Running `target/debug/hello_world` -Hello, world! -``` - -Youโ€™ll now notice a new file, `Cargo.lock`. It contains information about our -dependencies. Since we donโ€™t have any yet, itโ€™s not very interesting. - -Once youโ€™re ready for release, you can use `cargo build --release` to compile -your files with optimizations turned on: - -```console -$ cargo build --release - Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) -``` - -`cargo build --release` puts the resulting binary in `target/release` instead of -`target/debug`. - -Compiling in debug mode is the default for development. Compilation time is -shorter since the compiler doesn't do optimizations, but the code will run -slower. Release mode takes longer to compile, but the code will run faster. 
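One way to observe the difference from inside the program itself is the standard `debug_assertions` cfg, which Cargo enables for the `dev` profile and disables for `release`. The sketch below is only an illustration; the printed messages are placeholders:

```rust
// src/main.rs

fn main() {
    if cfg!(debug_assertions) {
        // Enabled by `cargo build` / `cargo run` (the dev profile).
        println!("built without optimizations (debug)");
    } else {
        // Disabled by `cargo build --release`.
        println!("built with optimizations (release)");
    }
}
```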
- -[TOML]: https://toml.io/ -[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' -[def-manifest]: ../appendix/glossary.md#manifest '"manifest" (glossary entry)' -[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/dependencies.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/dependencies.md deleted file mode 100644 index 94419f15b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/dependencies.md +++ /dev/null @@ -1,93 +0,0 @@ -## Dependencies - -[crates.io] is the Rust community's central [*package registry*][def-package-registry] -that serves as a location to discover and download -[packages][def-package]. `cargo` is configured to use it by default to find -requested packages. - -To depend on a library hosted on [crates.io], add it to your `Cargo.toml`. - -[crates.io]: https://crates.io/ - -### Adding a dependency - -If your `Cargo.toml` doesn't already have a `[dependencies]` section, add -that, then list the [crate][def-crate] name and version that you would like to -use. This example adds a dependency of the `time` crate: - -```toml -[dependencies] -time = "0.1.12" -``` - -The version string is a [SemVer] version requirement. The [specifying -dependencies](../reference/specifying-dependencies.md) docs have more information about -the options you have here. - -[SemVer]: https://semver.org - -If we also wanted to add a dependency on the `regex` crate, we would not need -to add `[dependencies]` for each crate listed. Here's what your whole -`Cargo.toml` file would look like with dependencies on the `time` and `regex` -crates: - -```toml -[package] -name = "hello_world" -version = "0.1.0" -edition = "2021" - -[dependencies] -time = "0.1.12" -regex = "0.1.41" -``` - -Re-run `cargo build`, and Cargo will fetch the new dependencies and all of -their dependencies, compile them all, and update the `Cargo.lock`: - -```console -$ cargo build - Updating crates.io index - Downloading memchr v0.1.5 - Downloading libc v0.1.10 - Downloading regex-syntax v0.2.1 - Downloading memchr v0.1.5 - Downloading aho-corasick v0.3.0 - Downloading regex v0.1.41 - Compiling memchr v0.1.5 - Compiling libc v0.1.10 - Compiling regex-syntax v0.2.1 - Compiling memchr v0.1.5 - Compiling aho-corasick v0.3.0 - Compiling regex v0.1.41 - Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) -``` - -Our `Cargo.lock` contains the exact information about which revision of all of -these dependencies we used. - -Now, if `regex` gets updated, we will still build with the same revision until -we choose to `cargo update`. - -You can now use the `regex` library in `main.rs`. - -```rust,ignore -use regex::Regex; - -fn main() { - let re = Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap(); - println!("Did our date match? {}", re.is_match("2014-01-01")); -} -``` - -Running it will show: - -```console -$ cargo run - Running `target/hello_world` -Did our date match? 
true -``` - -[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' -[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' -[def-package-registry]: ../appendix/glossary.md#package-registry '"package-registry" (glossary entry)' diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/index.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/index.md deleted file mode 100644 index fe6d86a39..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/index.md +++ /dev/null @@ -1,15 +0,0 @@ -## Cargo Guide - -This guide will give you all that you need to know about how to use Cargo to -develop Rust packages. - -* [Why Cargo Exists](why-cargo-exists.md) -* [Creating a New Package](creating-a-new-project.md) -* [Working on an Existing Cargo Package](working-on-an-existing-project.md) -* [Dependencies](dependencies.md) -* [Package Layout](project-layout.md) -* [Cargo.toml vs Cargo.lock](cargo-toml-vs-cargo-lock.md) -* [Tests](tests.md) -* [Continuous Integration](continuous-integration.md) -* [Cargo Home](cargo-home.md) -* [Build Cache](build-cache.md) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/project-layout.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/project-layout.md deleted file mode 100644 index a3ce3f8a7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/project-layout.md +++ /dev/null @@ -1,61 +0,0 @@ -## Package Layout - -Cargo uses conventions for file placement to make it easy to dive into a new -Cargo [package][def-package]: - -```text -. -โ”œโ”€โ”€ Cargo.lock -โ”œโ”€โ”€ Cargo.toml -โ”œโ”€โ”€ src/ -โ”‚ย ย  โ”œโ”€โ”€ lib.rs -โ”‚ย ย  โ”œโ”€โ”€ main.rs -โ”‚ย ย  โ””โ”€โ”€ bin/ -โ”‚ โ”œโ”€โ”€ named-executable.rs -โ”‚ย ย  ย ย  โ”œโ”€โ”€ another-executable.rs -โ”‚ย ย  ย ย  โ””โ”€โ”€ multi-file-executable/ -โ”‚ย ย  ย ย  โ”œโ”€โ”€ main.rs -โ”‚ย ย  ย ย  โ””โ”€โ”€ some_module.rs -โ”œโ”€โ”€ benches/ -โ”‚ย ย  โ”œโ”€โ”€ large-input.rs -โ”‚ย ย  โ””โ”€โ”€ multi-file-bench/ -โ”‚ย ย  โ”œโ”€โ”€ main.rs -โ”‚ย ย  โ””โ”€โ”€ bench_module.rs -โ”œโ”€โ”€ examples/ -โ”‚ย ย  โ”œโ”€โ”€ simple.rs -โ”‚ย ย  โ””โ”€โ”€ multi-file-example/ -โ”‚ย ย  โ”œโ”€โ”€ main.rs -โ”‚ย ย  โ””โ”€โ”€ ex_module.rs -โ””โ”€โ”€ tests/ - โ”œโ”€โ”€ some-integration-tests.rs - โ””โ”€โ”€ multi-file-test/ - โ”œโ”€โ”€ main.rs - โ””โ”€โ”€ test_module.rs -``` - -* `Cargo.toml` and `Cargo.lock` are stored in the root of your package (*package - root*). -* Source code goes in the `src` directory. -* The default library file is `src/lib.rs`. -* The default executable file is `src/main.rs`. - * Other executables can be placed in `src/bin/`. -* Benchmarks go in the `benches` directory. -* Examples go in the `examples` directory. -* Integration tests go in the `tests` directory. - -If a binary, example, bench, or integration test consists of multiple source -files, place a `main.rs` file along with the extra [*modules*][def-module] -within a subdirectory of the `src/bin`, `examples`, `benches`, or `tests` -directory. The name of the executable will be the directory name. - -You can learn more about Rust's module system in [the book][book-modules]. - -See [Configuring a target] for more details on manually configuring targets. -See [Target auto-discovery] for more information on controlling how Cargo -automatically infers target names. 
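For example, the `multi-file-executable` target from the tree above could be wired together as follows (the function and message are made up; only the paths come from the layout shown). Since the executable takes its name from the directory, it could then be built and run with `cargo run --bin multi-file-executable`.

```rust
// src/bin/multi-file-executable/main.rs

mod some_module; // loads src/bin/multi-file-executable/some_module.rs

fn main() {
    some_module::greet();
}
```

```rust
// src/bin/multi-file-executable/some_module.rs

pub fn greet() {
    println!("hello from a multi-file executable");
}
```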
- -[book-modules]: ../../book/ch07-00-managing-growing-projects-with-packages-crates-and-modules.html -[Configuring a target]: ../reference/cargo-targets.md#configuring-a-target -[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' -[def-module]: ../appendix/glossary.md#module '"module" (glossary entry)' -[Target auto-discovery]: ../reference/cargo-targets.md#target-auto-discovery diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/tests.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/tests.md deleted file mode 100644 index 50ee6ddb0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/tests.md +++ /dev/null @@ -1,40 +0,0 @@ -## Tests - -Cargo can run your tests with the `cargo test` command. Cargo looks for tests -to run in two places: in each of your `src` files and any tests in `tests/`. -Tests in your `src` files should be unit tests, and tests in `tests/` should be -integration-style tests. As such, youโ€™ll need to import your crates into -the files in `tests`. - -Here's an example of running `cargo test` in our [package][def-package], which -currently has no tests: - -```console -$ cargo test - Compiling regex v1.5.0 (https://github.com/rust-lang/regex.git#9f9f693) - Compiling hello_world v0.1.0 (file:///path/to/package/hello_world) - Running target/test/hello_world-9c2b65bbb79eabce - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out -``` - -If our package had tests, we would see more output with the correct number of -tests. - -You can also run a specific test by passing a filter: - -```console -$ cargo test foo -``` - -This will run any test with `foo` in its name. - -`cargo test` runs additional checks as well. It will compile any -examples youโ€™ve included and will also test the examples in your -documentation. Please see the [testing guide][testing] in the Rust -documentation for more details. - -[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' -[testing]: ../../book/ch11-00-testing.html diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/why-cargo-exists.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/why-cargo-exists.md deleted file mode 100644 index 02b222f01..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/why-cargo-exists.md +++ /dev/null @@ -1,65 +0,0 @@ -## Why Cargo Exists - -### Preliminaries - -In Rust, as you may know, a library or executable program is called a -[*crate*][def-crate]. Crates are compiled using the Rust compiler, -`rustc`. When starting with Rust, the first source code most people encounter -is that of the venerable โ€œhello worldโ€ program, which they compile by invoking -`rustc` directly: - -```console -$ rustc hello.rs -$ ./hello -Hello, world! -``` - -Note that the above command required that we specify the file name -explicitly. If we were to directly use `rustc` to compile a different program, -a different command line invocation would be required. If we needed to specify -any specific compiler flags or include external dependencies, then the -needed command would be even more specific (and elaborate). - -Furthermore, most non-trivial programs will likely have dependencies on -external libraries, and will therefore also depend transitively on *their* -dependencies. Obtaining the correct versions of all the necessary dependencies -and keeping them up to date would be laborious and error-prone if done by -hand. 
- -Rather than work only with crates and `rustc`, we can avoid the manual tedium -involved with performing the above tasks by introducing a higher-level -["*package*"][def-package] abstraction and by using a -[*package manager*][def-package-manager]. - -### Enter: Cargo - -*Cargo* is the Rust package manager. It is a tool that allows Rust -[*packages*][def-package] to declare their various dependencies and ensure -that youโ€™ll always get a repeatable build. - -To accomplish this goal, Cargo does four things: - -* Introduces two metadata files with various bits of package information. -* Fetches and builds your packageโ€™s dependencies. -* Invokes `rustc` or another build tool with the correct parameters to build - your package. -* Introduces conventions to make working with Rust packages easier. - -To a large extent, Cargo normalizes the commands needed to build a given -program or library; this is one aspect to the above mentioned conventions. As -we show later, the same command can be used to build different -[*artifacts*][def-artifact], regardless of their names. Rather than invoke -`rustc` directly, we can instead invoke something generic such as `cargo -build` and let cargo worry about constructing the correct `rustc` -invocation. Furthermore, Cargo will automatically fetch from a -[*registry*][def-registry] any dependencies we have defined for our artifact, -and arrange for them to be incorporated into our build as needed. - -It is only a slight exaggeration to say that once you know how to build one -Cargo-based project, you know how to build *all* of them. - -[def-artifact]: ../appendix/glossary.md#artifact '"artifact" (glossary entry)' -[def-crate]: ../appendix/glossary.md#crate '"crate" (glossary entry)' -[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' -[def-package-manager]: ../appendix/glossary.md#package-manager '"package manager" (glossary entry)' -[def-registry]: ../appendix/glossary.md#registry '"registry" (glossary entry)' diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/working-on-an-existing-project.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/working-on-an-existing-project.md deleted file mode 100644 index f9c26cd90..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/guide/working-on-an-existing-project.md +++ /dev/null @@ -1,24 +0,0 @@ -## Working on an Existing Cargo Package - -If you download an existing [package][def-package] that uses Cargo, itโ€™s -really easy to get going. - -First, get the package from somewhere. In this example, weโ€™ll use `regex` -cloned from its repository on GitHub: - -```console -$ git clone https://github.com/rust-lang/regex.git -$ cd regex -``` - -To build, use `cargo build`: - -```console -$ cargo build - Compiling regex v1.5.0 (file:///path/to/package/regex) -``` - -This will fetch all of the dependencies and then build them, along with the -package. 
- -[def-package]: ../appendix/glossary.md#package '"package" (glossary entry)' diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/Cargo-Logo-Small.png b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/Cargo-Logo-Small.png deleted file mode 100644 index e3a99208c..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/Cargo-Logo-Small.png and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/auth-level-acl.png b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/auth-level-acl.png deleted file mode 100644 index e7bc25180..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/auth-level-acl.png and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/org-level-acl.png b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/org-level-acl.png deleted file mode 100644 index ed5aa882a..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/org-level-acl.png and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/winapi-features.svg b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/winapi-features.svg deleted file mode 100644 index 32327ad1d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/images/winapi-features.svg +++ /dev/null @@ -1,3 +0,0 @@ - - -
[winapi-features.svg: diagram text only. my-package depends on foo and bar; foo uses winapi with features fileapi, handleapi; bar uses winapi with features std, winnt; winapi is built with the union: fileapi, handleapi, std, winnt.]
\ No newline at end of file diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/index.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/index.md deleted file mode 100644 index 708514929..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/index.md +++ /dev/null @@ -1,49 +0,0 @@ -# The Cargo Book - -![Cargo Logo](images/Cargo-Logo-Small.png) - -Cargo is the [Rust] [*package manager*][def-package-manager]. Cargo downloads your Rust [package][def-package]'s -dependencies, compiles your packages, makes distributable packages, and uploads them to -[crates.io], the Rust communityโ€™s [*package registry*][def-package-registry]. You can contribute -to this book on [GitHub]. - - -### Sections - -**[Getting Started](getting-started/index.md)** - -To get started with Cargo, install Cargo (and Rust) and set up your first -[*crate*][def-crate]. - -**[Cargo Guide](guide/index.md)** - -The guide will give you all you need to know about how to use Cargo to develop -Rust packages. - -**[Cargo Reference](reference/index.md)** - -The reference covers the details of various areas of Cargo. - -**[Cargo Commands](commands/index.md)** - -The commands will let you interact with Cargo using its command-line interface. - -**[Frequently Asked Questions](faq.md)** - -**Appendices:** -* [Glossary](appendix/glossary.md) -* [Git Authentication](appendix/git-authentication.md) - -**Other Documentation:** -* [Changelog](https://github.com/rust-lang/cargo/blob/master/CHANGELOG.md) โ€” - Detailed notes about changes in Cargo in each release. -* [Rust documentation website](https://doc.rust-lang.org/) โ€” Links to official - Rust documentation and tools. - -[def-crate]: ./appendix/glossary.md#crate '"crate" (glossary entry)' -[def-package]: ./appendix/glossary.md#package '"package" (glossary entry)' -[def-package-manager]: ./appendix/glossary.md#package-manager '"package manager" (glossary entry)' -[def-package-registry]: ./appendix/glossary.md#package-registry '"package registry" (glossary entry)' -[rust]: https://www.rust-lang.org/ -[crates.io]: https://crates.io/ -[GitHub]: https://github.com/rust-lang/cargo/tree/master/src/doc diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/build-script-examples.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/build-script-examples.md deleted file mode 100644 index 0db3bf726..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/build-script-examples.md +++ /dev/null @@ -1,505 +0,0 @@ -## Build Script Examples - -The following sections illustrate some examples of writing build scripts. - -Some common build script functionality can be found via crates on [crates.io]. -Check out the [`build-dependencies` -keyword](https://crates.io/keywords/build-dependencies) to see what is -available. The following is a sample of some popular crates[^โ€ ]: - -* [`bindgen`](https://crates.io/crates/bindgen) โ€” Automatically generate Rust - FFI bindings to C libraries. -* [`cc`](https://crates.io/crates/cc) โ€” Compiles C/C++/assembly. -* [`pkg-config`](https://crates.io/crates/pkg-config) โ€” Detect system - libraries using the `pkg-config` utility. -* [`cmake`](https://crates.io/crates/cmake) โ€” Runs the `cmake` build tool to build a native library. 
-* [`autocfg`](https://crates.io/crates/autocfg), - [`rustc_version`](https://crates.io/crates/rustc_version), - [`version_check`](https://crates.io/crates/version_check) โ€” These crates - provide ways to implement conditional compilation based on the current - `rustc` such as the version of the compiler. - -[^โ€ ]: This list is not an endorsement. Evaluate your dependencies to see which -is right for your project. - -### Code generation - -Some Cargo packages need to have code generated just before they are compiled -for various reasons. Here weโ€™ll walk through a simple example which generates a -library call as part of the build script. - -First, letโ€™s take a look at the directory structure of this package: - -```text -. -โ”œโ”€โ”€ Cargo.toml -โ”œโ”€โ”€ build.rs -โ””โ”€โ”€ src - โ””โ”€โ”€ main.rs - -1 directory, 3 files -``` - -Here we can see that we have a `build.rs` build script and our binary in -`main.rs`. This package has a basic manifest: - -```toml -# Cargo.toml - -[package] -name = "hello-from-generated-code" -version = "0.1.0" -``` - -Letโ€™s see whatโ€™s inside the build script: - -```rust,no_run -// build.rs - -use std::env; -use std::fs; -use std::path::Path; - -fn main() { - let out_dir = env::var_os("OUT_DIR").unwrap(); - let dest_path = Path::new(&out_dir).join("hello.rs"); - fs::write( - &dest_path, - "pub fn message() -> &'static str { - \"Hello, World!\" - } - " - ).unwrap(); - println!("cargo:rerun-if-changed=build.rs"); -} -``` - -Thereโ€™s a couple of points of note here: - -* The script uses the `OUT_DIR` environment variable to discover where the - output files should be located. It can use the processโ€™ current working - directory to find where the input files should be located, but in this case we - donโ€™t have any input files. -* In general, build scripts should not modify any files outside of `OUT_DIR`. - It may seem fine on the first blush, but it does cause problems when you use - such crate as a dependency, because there's an *implicit* invariant that - sources in `.cargo/registry` should be immutable. `cargo` won't allow such - scripts when packaging. -* This script is relatively simple as it just writes out a small generated file. - One could imagine that other more fanciful operations could take place such as - generating a Rust module from a C header file or another language definition, - for example. -* The [`rerun-if-changed` instruction](build-scripts.md#rerun-if-changed) - tells Cargo that the build script only needs to re-run if the build script - itself changes. Without this line, Cargo will automatically run the build - script if any file in the package changes. If your code generation uses some - input files, this is where you would print a list of each of those files. - -Next, letโ€™s peek at the library itself: - -```rust,ignore -// src/main.rs - -include!(concat!(env!("OUT_DIR"), "/hello.rs")); - -fn main() { - println!("{}", message()); -} -``` - -This is where the real magic happens. The library is using the rustc-defined -[`include!` macro][include-macro] in combination with the -[`concat!`][concat-macro] and [`env!`][env-macro] macros to include the -generated file (`hello.rs`) into the crateโ€™s compilation. - -Using the structure shown here, crates can include any number of generated files -from the build script itself. 
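If the generated code is derived from input files, the same structure applies; the build script simply emits one `rerun-if-changed` line per input. The sketch below assumes a hypothetical `spec/greeting.txt` file in the package root:

```rust,no_run
// build.rs (sketch, assuming a hypothetical spec/greeting.txt input file)

use std::env;
use std::fs;
use std::path::Path;

fn main() {
    let out_dir = env::var_os("OUT_DIR").unwrap();
    let dest_path = Path::new(&out_dir).join("hello.rs");

    // Derive the generated function from the input file.
    let greeting = fs::read_to_string("spec/greeting.txt").unwrap();
    fs::write(
        &dest_path,
        format!("pub fn message() -> &'static str {{ {:?} }}", greeting.trim()),
    )
    .unwrap();

    // Re-run if either the script or its input changes.
    println!("cargo:rerun-if-changed=build.rs");
    println!("cargo:rerun-if-changed=spec/greeting.txt");
}
```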
- -[include-macro]: ../../std/macro.include.html -[concat-macro]: ../../std/macro.concat.html -[env-macro]: ../../std/macro.env.html - -### Building a native library - -Sometimes itโ€™s necessary to build some native C or C++ code as part of a -package. This is another excellent use case of leveraging the build script to -build a native library before the Rust crate itself. As an example, weโ€™ll create -a Rust library which calls into C to print โ€œHello, World!โ€. - -Like above, letโ€™s first take a look at the package layout: - -```text -. -โ”œโ”€โ”€ Cargo.toml -โ”œโ”€โ”€ build.rs -โ””โ”€โ”€ src - โ”œโ”€โ”€ hello.c - โ””โ”€โ”€ main.rs - -1 directory, 4 files -``` - -Pretty similar to before! Next, the manifest: - -```toml -# Cargo.toml - -[package] -name = "hello-world-from-c" -version = "0.1.0" -edition = "2021" -``` - -For now weโ€™re not going to use any build dependencies, so letโ€™s take a look at -the build script now: - -```rust,no_run -// build.rs - -use std::process::Command; -use std::env; -use std::path::Path; - -fn main() { - let out_dir = env::var("OUT_DIR").unwrap(); - - // Note that there are a number of downsides to this approach, the comments - // below detail how to improve the portability of these commands. - Command::new("gcc").args(&["src/hello.c", "-c", "-fPIC", "-o"]) - .arg(&format!("{}/hello.o", out_dir)) - .status().unwrap(); - Command::new("ar").args(&["crus", "libhello.a", "hello.o"]) - .current_dir(&Path::new(&out_dir)) - .status().unwrap(); - - println!("cargo:rustc-link-search=native={}", out_dir); - println!("cargo:rustc-link-lib=static=hello"); - println!("cargo:rerun-if-changed=src/hello.c"); -} -``` - -This build script starts out by compiling our C file into an object file (by -invoking `gcc`) and then converting this object file into a static library (by -invoking `ar`). The final step is feedback to Cargo itself to say that our -output was in `out_dir` and the compiler should link the crate to `libhello.a` -statically via the `-l static=hello` flag. - -Note that there are a number of drawbacks to this hard-coded approach: - -* The `gcc` command itself is not portable across platforms. For example itโ€™s - unlikely that Windows platforms have `gcc`, and not even all Unix platforms - may have `gcc`. The `ar` command is also in a similar situation. -* These commands do not take cross-compilation into account. If weโ€™re cross - compiling for a platform such as Android itโ€™s unlikely that `gcc` will produce - an ARM executable. - -Not to fear, though, this is where a `build-dependencies` entry would help! -The Cargo ecosystem has a number of packages to make this sort of task much -easier, portable, and standardized. Let's try the [`cc` -crate](https://crates.io/crates/cc) from [crates.io]. First, add it to the -`build-dependencies` in `Cargo.toml`: - -```toml -[build-dependencies] -cc = "1.0" -``` - -And rewrite the build script to use this crate: - -```rust,ignore -// build.rs - -fn main() { - cc::Build::new() - .file("src/hello.c") - .compile("hello"); - println!("cargo:rerun-if-changed=src/hello.c"); -} -``` - -The [`cc` crate] abstracts a range of build script requirements for C code: - -* It invokes the appropriate compiler (MSVC for windows, `gcc` for MinGW, `cc` - for Unix platforms, etc.). -* It takes the `TARGET` variable into account by passing appropriate flags to - the compiler being used. -* Other environment variables, such as `OPT_LEVEL`, `DEBUG`, etc., are all - handled automatically. 
-* The stdout output and `OUT_DIR` locations are also handled by the `cc` - library. - -Here we can start to see some of the major benefits of farming as much -functionality as possible out to common build dependencies rather than -duplicating logic across all build scripts! - -Back to the case study though, letโ€™s take a quick look at the contents of the -`src` directory: - -```c -// src/hello.c - -#include - -void hello() { - printf("Hello, World!\n"); -} -``` - -```rust,ignore -// src/main.rs - -// Note the lack of the `#[link]` attribute. Weโ€™re delegating the responsibility -// of selecting what to link over to the build script rather than hard-coding -// it in the source file. -extern { fn hello(); } - -fn main() { - unsafe { hello(); } -} -``` - -And there we go! This should complete our example of building some C code from a -Cargo package using the build script itself. This also shows why using a build -dependency can be crucial in many situations and even much more concise! - -Weโ€™ve also seen a brief example of how a build script can use a crate as a -dependency purely for the build process and not for the crate itself at runtime. - -[`cc` crate]: https://crates.io/crates/cc - -### Linking to system libraries - -This example demonstrates how to link a system library and how the build -script is used to support this use case. - -Quite frequently a Rust crate wants to link to a native library provided on -the system to bind its functionality or just use it as part of an -implementation detail. This is quite a nuanced problem when it comes to -performing this in a platform-agnostic fashion. It is best, if possible, to -farm out as much of this as possible to make this as easy as possible for -consumers. - -For this example, we will be creating a binding to the system's zlib library. -This is a library that is commonly found on most Unix-like systems that -provides data compression. This is already wrapped up in the [`libz-sys` -crate], but for this example, we'll do an extremely simplified version. Check -out [the source code][libz-source] for the full example. - -To make it easy to find the location of the library, we will use the -[`pkg-config` crate]. This crate uses the system's `pkg-config` utility to -discover information about a library. It will automatically tell Cargo what is -needed to link the library. This will likely only work on Unix-like systems -with `pkg-config` installed. Let's start by setting up the manifest: - -```toml -# Cargo.toml - -[package] -name = "libz-sys" -version = "0.1.0" -edition = "2021" -links = "z" - -[build-dependencies] -pkg-config = "0.3.16" -``` - -Take note that we included the `links` key in the `package` table. This tells -Cargo that we are linking to the `libz` library. See ["Using another sys -crate"](#using-another-sys-crate) for an example that will leverage this. - -The build script is fairly simple: - -```rust,ignore -// build.rs - -fn main() { - pkg_config::Config::new().probe("zlib").unwrap(); - println!("cargo:rerun-if-changed=build.rs"); -} -``` - -Let's round out the example with a basic FFI binding: - -```rust,ignore -// src/lib.rs - -use std::os::raw::{c_uint, c_ulong}; - -extern "C" { - pub fn crc32(crc: c_ulong, buf: *const u8, len: c_uint) -> c_ulong; -} - -#[test] -fn test_crc32() { - let s = "hello"; - unsafe { - assert_eq!(crc32(0, s.as_ptr(), s.len() as c_uint), 0x3610a686); - } -} -``` - -Run `cargo build -vv` to see the output from the build script. 
On a system -with `libz` already installed, it may look something like this: - -```text -[libz-sys 0.1.0] cargo:rustc-link-search=native=/usr/lib -[libz-sys 0.1.0] cargo:rustc-link-lib=z -[libz-sys 0.1.0] cargo:rerun-if-changed=build.rs -``` - -Nice! `pkg-config` did all the work of finding the library and telling Cargo -where it is. - -It is not unusual for packages to include the source for the library, and -build it statically if it is not found on the system, or if a feature or -environment variable is set. For example, the real [`libz-sys` crate] checks the -environment variable `LIBZ_SYS_STATIC` or the `static` feature to build it -from source instead of using the system library. Check out [the -source][libz-source] for a more complete example. - -[`libz-sys` crate]: https://crates.io/crates/libz-sys -[`pkg-config` crate]: https://crates.io/crates/pkg-config -[libz-source]: https://github.com/rust-lang/libz-sys - -### Using another `sys` crate - -When using the `links` key, crates may set metadata that can be read by other -crates that depend on it. This provides a mechanism to communicate information -between crates. In this example, we'll be creating a C library that makes use -of zlib from the real [`libz-sys` crate]. - -If you have a C library that depends on zlib, you can leverage the [`libz-sys` -crate] to automatically find it or build it. This is great for cross-platform -support, such as Windows where zlib is not usually installed. `libz-sys` [sets -the `include` -metadata](https://github.com/rust-lang/libz-sys/blob/3c594e677c79584500da673f918c4d2101ac97a1/build.rs#L156) -to tell other packages where to find the header files for zlib. Our build -script can read that metadata with the `DEP_Z_INCLUDE` environment variable. -Here's an example: - -```toml -# Cargo.toml - -[package] -name = "zuser" -version = "0.1.0" -edition = "2021" - -[dependencies] -libz-sys = "1.0.25" - -[build-dependencies] -cc = "1.0.46" -``` - -Here we have included `libz-sys` which will ensure that there is only one -`libz` used in the final library, and give us access to it from our build -script: - -```rust,ignore -// build.rs - -fn main() { - let mut cfg = cc::Build::new(); - cfg.file("src/zuser.c"); - if let Some(include) = std::env::var_os("DEP_Z_INCLUDE") { - cfg.include(include); - } - cfg.compile("zuser"); - println!("cargo:rerun-if-changed=src/zuser.c"); -} -``` - -With `libz-sys` doing all the heavy lifting, the C source code may now include -the zlib header, and it should find the header, even on systems where it isn't -already installed. - -```c -// src/zuser.c - -#include "zlib.h" - -// โ€ฆ rest of code that makes use of zlib. -``` - -### Conditional compilation - -A build script may emit [`rustc-cfg` instructions] which can enable conditions -that can be checked at compile time. In this example, we'll take a look at how -the [`openssl` crate] uses this to support multiple versions of the OpenSSL -library. - -The [`openssl-sys` crate] implements building and linking the OpenSSL library. -It supports multiple different implementations (like LibreSSL) and multiple -versions. It makes use of the `links` key so that it may pass information to -other build scripts. One of the things it passes is the `version_number` key, -which is the version of OpenSSL that was detected. 
The code in the build -script looks something [like -this](https://github.com/sfackler/rust-openssl/blob/dc72a8e2c429e46c275e528b61a733a66e7877fc/openssl-sys/build/main.rs#L216): - -```rust,ignore -println!("cargo:version_number={:x}", openssl_version); -``` - -This instruction causes the `DEP_OPENSSL_VERSION_NUMBER` environment variable -to be set in any crates that directly depend on `openssl-sys`. - -The `openssl` crate, which provides the higher-level interface, specifies -`openssl-sys` as a dependency. The `openssl` build script can read the -version information generated by the `openssl-sys` build script with the -`DEP_OPENSSL_VERSION_NUMBER` environment variable. It uses this to generate -some [`cfg` -values](https://github.com/sfackler/rust-openssl/blob/dc72a8e2c429e46c275e528b61a733a66e7877fc/openssl/build.rs#L18-L36): - -```rust,ignore -// (portion of build.rs) - -if let Ok(version) = env::var("DEP_OPENSSL_VERSION_NUMBER") { - let version = u64::from_str_radix(&version, 16).unwrap(); - - if version >= 0x1_00_01_00_0 { - println!("cargo:rustc-cfg=ossl101"); - } - if version >= 0x1_00_02_00_0 { - println!("cargo:rustc-cfg=ossl102"); - } - if version >= 0x1_01_00_00_0 { - println!("cargo:rustc-cfg=ossl110"); - } - if version >= 0x1_01_00_07_0 { - println!("cargo:rustc-cfg=ossl110g"); - } - if version >= 0x1_01_01_00_0 { - println!("cargo:rustc-cfg=ossl111"); - } -} -``` - -These `cfg` values can then be used with the [`cfg` attribute] or the [`cfg` -macro] to conditionally include code. For example, SHA3 support was added in -OpenSSL 1.1.1, so it is [conditionally -excluded](https://github.com/sfackler/rust-openssl/blob/dc72a8e2c429e46c275e528b61a733a66e7877fc/openssl/src/hash.rs#L67-L85) -for older versions: - -```rust,ignore -// (portion of openssl crate) - -#[cfg(ossl111)] -pub fn sha3_224() -> MessageDigest { - unsafe { MessageDigest(ffi::EVP_sha3_224()) } -} -``` - -Of course, one should be careful when using this, since it makes the resulting -binary even more dependent on the build environment. In this example, if the -binary is distributed to another system, it may not have the exact same shared -libraries, which could cause problems. - -[`cfg` attribute]: ../../reference/conditional-compilation.md#the-cfg-attribute -[`cfg` macro]: ../../std/macro.cfg.html -[`rustc-cfg` instructions]: build-scripts.md#rustc-cfg -[`openssl` crate]: https://crates.io/crates/openssl -[`openssl-sys` crate]: https://crates.io/crates/openssl-sys - -[crates.io]: https://crates.io/ diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/build-scripts.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/build-scripts.md deleted file mode 100644 index 2c17cb2ee..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/build-scripts.md +++ /dev/null @@ -1,445 +0,0 @@ -## Build Scripts - -Some packages need to compile third-party non-Rust code, for example C -libraries. Other packages need to link to C libraries which can either be -located on the system or possibly need to be built from source. Others still -need facilities for functionality such as code generation before building (think -parser generators). - -Cargo does not aim to replace other tools that are well-optimized for these -tasks, but it does integrate with them with custom build scripts. Placing a -file named `build.rs` in the root of a package will cause Cargo to compile -that script and execute it just before building the package. 
- -```rust,ignore -// Example custom build script. -fn main() { - // Tell Cargo that if the given file changes, to rerun this build script. - println!("cargo:rerun-if-changed=src/hello.c"); - // Use the `cc` crate to build a C file and statically link it. - cc::Build::new() - .file("src/hello.c") - .compile("hello"); -} -``` - -Some example use cases of build scripts are: - -* Building a bundled C library. -* Finding a C library on the host system. -* Generating a Rust module from a specification. -* Performing any platform-specific configuration needed for the crate. - -The sections below describe how build scripts work, and the [examples -chapter](build-script-examples.md) shows a variety of examples on how to write -scripts. - -> Note: The [`package.build` manifest key](manifest.md#package-build) can be -> used to change the name of the build script, or disable it entirely. - -### Life Cycle of a Build Script - -Just before a package is built, Cargo will compile a build script into an -executable (if it has not already been built). It will then run the script, -which may perform any number of tasks. The script may communicate with Cargo -by printing specially formatted commands prefixed with `cargo:` to stdout. - -The build script will be rebuilt if any of its source files or dependencies -change. - -By default, Cargo will re-run the build script if any of the files in the -package changes. Typically it is best to use the `rerun-if` commands, -described in the [change detection](#change-detection) section below, to -narrow the focus of what triggers a build script to run again. - -Once the build script successfully finishes executing, the rest of the package -will be compiled. Scripts should exit with a non-zero exit code to halt the -build if there is an error, in which case the build script's output will be -displayed on the terminal. - -### Inputs to the Build Script - -When the build script is run, there are a number of inputs to the build script, -all passed in the form of [environment variables][build-env]. - -In addition to environment variables, the build scriptโ€™s current directory is -the source directory of the build scriptโ€™s package. - -[build-env]: environment-variables.md#environment-variables-cargo-sets-for-build-scripts - -### Outputs of the Build Script - -Build scripts may save any output files in the directory specified in the -[`OUT_DIR` environment variable][build-env]. Scripts should not modify any -files outside of that directory. - -Build scripts communicate with Cargo by printing to stdout. Cargo will -interpret each line that starts with `cargo:` as an instruction that will -influence compilation of the package. All other lines are ignored. - -The output of the script is hidden from the terminal during normal -compilation. If you would like to see the output directly in your terminal, -invoke Cargo as "very verbose" with the `-vv` flag. This only happens when the -build script is run. If Cargo determines nothing has changed, it will not -re-run the script, see [change detection](#change-detection) below for more. - -All the lines printed to stdout by a build script are written to a file like -`target/debug/build//output` (the precise location may depend on your -configuration). The stderr output is also saved in that same directory. - -The following is a summary of the instructions that Cargo recognizes, with each -one detailed below. - -* [`cargo:rerun-if-changed=PATH`](#rerun-if-changed) โ€” Tells Cargo when to - re-run the script. 
-* [`cargo:rerun-if-env-changed=VAR`](#rerun-if-env-changed) โ€” Tells Cargo when - to re-run the script. -* [`cargo:rustc-link-arg=FLAG`](#rustc-link-arg) โ€“ Passes custom flags to a - linker for benchmarks, binaries, `cdylib` crates, examples, and tests. -* [`cargo:rustc-link-arg-bin=BIN=FLAG`](#rustc-link-arg-bin) โ€“ Passes custom - flags to a linker for the binary `BIN`. -* [`cargo:rustc-link-arg-bins=FLAG`](#rustc-link-arg-bins) โ€“ Passes custom - flags to a linker for binaries. -* [`cargo:rustc-link-lib=[KIND=]NAME`](#rustc-link-lib) โ€” Adds a library to - link. -* [`cargo:rustc-link-search=[KIND=]PATH`](#rustc-link-search) โ€” Adds to the - library search path. -* [`cargo:rustc-flags=FLAGS`](#rustc-flags) โ€” Passes certain flags to the - compiler. -* [`cargo:rustc-cfg=KEY[="VALUE"]`](#rustc-cfg) โ€” Enables compile-time `cfg` - settings. -* [`cargo:rustc-env=VAR=VALUE`](#rustc-env) โ€” Sets an environment variable. -* [`cargo:rustc-cdylib-link-arg=FLAG`](#rustc-cdylib-link-arg) โ€” Passes custom - flags to a linker for cdylib crates. -* [`cargo:warning=MESSAGE`](#cargo-warning) โ€” Displays a warning on the - terminal. -* [`cargo:KEY=VALUE`](#the-links-manifest-key) โ€” Metadata, used by `links` - scripts. - - - -#### `cargo:rustc-link-arg=FLAG` - -The `rustc-link-arg` instruction tells Cargo to pass the [`-C link-arg=FLAG` -option][link-arg] to the compiler, but only when building supported targets -(benchmarks, binaries, `cdylib` crates, examples, and tests). Its usage is -highly platform specific. It is useful to set the shared library version or -linker script. - -[link-arg]: ../../rustc/codegen-options/index.md#link-arg - - -#### `cargo:rustc-link-arg-bin=BIN=FLAG` - -The `rustc-link-arg-bin` instruction tells Cargo to pass the [`-C -link-arg=FLAG` option][link-arg] to the compiler, but only when building -the binary target with name `BIN`. Its usage is highly platform specific. It is useful -to set a linker script or other linker options. - -[link-arg]: ../../rustc/codegen-options/index.md#link-arg - - -#### `cargo:rustc-link-arg-bins=FLAG` - -The `rustc-link-arg-bins` instruction tells Cargo to pass the [`-C -link-arg=FLAG` option][link-arg] to the compiler, but only when building a -binary target. Its usage is highly platform specific. It is useful -to set a linker script or other linker options. - -[link-arg]: ../../rustc/codegen-options/index.md#link-arg - - -#### `cargo:rustc-link-lib=[KIND=]NAME` - -The `rustc-link-lib` instruction tells Cargo to link the given library using -the compiler's [`-l` flag][option-link]. This is typically used to link a -native library using [FFI]. - -The `-l` flag is only passed to the library target of the package, unless -there is no library target, in which case it is passed to all targets. This is -done because all other targets have an implicit dependency on the library -target, and the given library to link should only be included once. This means -that if a package has both a library and a binary target, the *library* has -access to the symbols from the given lib, and the binary should access them -through the library target's public API. - -The optional `KIND` may be one of `dylib`, `static`, or `framework`. See the -[rustc book][option-link] for more detail. 
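As a rough sketch of how this instruction is typically emitted (illustrative only, not part of the original page; the library name `z` and the `STATIC_Z` variable are invented), a build script might choose between static and dynamic linking like so:

```rust,ignore
// Illustrative build.rs: link the system zlib, statically if requested.
fn main() {
    // Re-run if the (hypothetical) STATIC_Z switch changes.
    println!("cargo:rerun-if-env-changed=STATIC_Z");
    if std::env::var_os("STATIC_Z").is_some() {
        println!("cargo:rustc-link-lib=static=z");
    } else {
        println!("cargo:rustc-link-lib=z");
    }
}
```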
- -[option-link]: ../../rustc/command-line-arguments.md#option-l-link-lib -[FFI]: ../../nomicon/ffi.md - - -#### `cargo:rustc-link-search=[KIND=]PATH` - -The `rustc-link-search` instruction tells Cargo to pass the [`-L` -flag][option-search] to the compiler to add a directory to the library search -path. - -The optional `KIND` may be one of `dependency`, `crate`, `native`, -`framework`, or `all`. See the [rustc book][option-search] for more detail. - -These paths are also added to the [dynamic library search path environment -variable](environment-variables.md#dynamic-library-paths) if they are within -the `OUT_DIR`. Depending on this behavior is discouraged since this makes it -difficult to use the resulting binary. In general, it is best to avoid -creating dynamic libraries in a build script (using existing system libraries -is fine). - -[option-search]: ../../rustc/command-line-arguments.md#option-l-search-path - - -#### `cargo:rustc-flags=FLAGS` - -The `rustc-flags` instruction tells Cargo to pass the given space-separated -flags to the compiler. This only allows the `-l` and `-L` flags, and is -equivalent to using [`rustc-link-lib`](#rustc-link-lib) and -[`rustc-link-search`](#rustc-link-search). - - -#### `cargo:rustc-cfg=KEY[="VALUE"]` - -The `rustc-cfg` instruction tells Cargo to pass the given value to the -[`--cfg` flag][option-cfg] to the compiler. This may be used for compile-time -detection of features to enable [conditional compilation]. - -Note that this does *not* affect Cargo's dependency resolution. This cannot be -used to enable an optional dependency, or enable other Cargo features. - -Be aware that [Cargo features] use the form `feature="foo"`. `cfg` values -passed with this flag are not restricted to that form, and may provide just a -single identifier, or any arbitrary key/value pair. For example, emitting -`cargo:rustc-cfg=abc` will then allow code to use `#[cfg(abc)]` (note the lack -of `feature=`). Or an arbitrary key/value pair may be used with an `=` symbol -like `cargo:rustc-cfg=my_component="foo"`. The key should be a Rust -identifier, the value should be a string. - -[cargo features]: features.md -[conditional compilation]: ../../reference/conditional-compilation.md -[option-cfg]: ../../rustc/command-line-arguments.md#option-cfg - - -#### `cargo:rustc-env=VAR=VALUE` - -The `rustc-env` instruction tells Cargo to set the given environment variable -when compiling the package. The value can be then retrieved by the [`env!` -macro][env-macro] in the compiled crate. This is useful for embedding -additional metadata in crate's code, such as the hash of git HEAD or the -unique identifier of a continuous integration server. - -See also the [environment variables automatically included by -Cargo][env-cargo]. - -> **Note**: These environment variables are also set when running an -> executable with `cargo run` or `cargo test`. However, this usage is -> discouraged since it ties the executable to Cargo's execution environment. -> Normally, these environment variables should only be checked at compile-time -> with the `env!` macro. - -[env-macro]: ../../std/macro.env.html -[env-cargo]: environment-variables.md#environment-variables-cargo-sets-for-crates - - -#### `cargo:rustc-cdylib-link-arg=FLAG` - -The `rustc-cdylib-link-arg` instruction tells Cargo to pass the [`-C -link-arg=FLAG` option][link-arg] to the compiler, but only when building a -`cdylib` library target. Its usage is highly platform specific. 
It is useful -to set the shared library version or the runtime-path. - -[link-arg]: ../../rustc/codegen-options/index.md#link-arg - - -#### `cargo:warning=MESSAGE` - -The `warning` instruction tells Cargo to display a warning after the build -script has finished running. Warnings are only shown for `path` dependencies -(that is, those you're working on locally), so for example warnings printed -out in [crates.io] crates are not emitted by default. The `-vv` "very verbose" -flag may be used to have Cargo display warnings for all crates. - -### Build Dependencies - -Build scripts are also allowed to have dependencies on other Cargo-based crates. -Dependencies are declared through the `build-dependencies` section of the -manifest. - -```toml -[build-dependencies] -cc = "1.0.46" -``` - -The build script **does not** have access to the dependencies listed in the -`dependencies` or `dev-dependencies` section (theyโ€™re not built yet!). Also, -build dependencies are not available to the package itself unless also -explicitly added in the `[dependencies]` table. - -It is recommended to carefully consider each dependency you add, weighing -against the impact on compile time, licensing, maintenance, etc. Cargo will -attempt to reuse a dependency if it is shared between build dependencies and -normal dependencies. However, this is not always possible, for example when -cross-compiling, so keep that in consideration of the impact on compile time. - -### Change Detection - -When rebuilding a package, Cargo does not necessarily know if the build script -needs to be run again. By default, it takes a conservative approach of always -re-running the build script if any file within the package is changed (or the -list of files controlled by the [`exclude` and `include` fields]). For most -cases, this is not a good choice, so it is recommended that every build script -emit at least one of the `rerun-if` instructions (described below). If these -are emitted, then Cargo will only re-run the script if the given value has -changed. - -[`exclude` and `include` fields]: manifest.md#the-exclude-and-include-fields - - -#### `cargo:rerun-if-changed=PATH` - -The `rerun-if-changed` instruction tells Cargo to re-run the build script if -the file at the given path has changed. Currently, Cargo only uses the -filesystem last-modified "mtime" timestamp to determine if the file has -changed. It compares against an internal cached timestamp of when the build -script last ran. - -If the path points to a directory, it will scan the entire directory for -any modifications. - -If the build script inherently does not need to re-run under any circumstance, -then emitting `cargo:rerun-if-changed=build.rs` is a simple way to prevent it -from being re-run (otherwise, the default if no `rerun-if` instructions are -emitted is to scan the entire package directory for changes). Cargo -automatically handles whether or not the script itself needs to be recompiled, -and of course the script will be re-run after it has been recompiled. -Otherwise, specifying `build.rs` is redundant and unnecessary. - - -#### `cargo:rerun-if-env-changed=NAME` - -The `rerun-if-env-changed` instruction tells Cargo to re-run the build script -if the value of an environment variable of the given name has changed. - -Note that the environment variables here are intended for global environment -variables like `CC` and such, it is not necessary to use this for environment -variables like `TARGET` that Cargo sets. 
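Putting the two `rerun-if` instructions together, a narrowly scoped build script might look like the following sketch (illustrative only; the file paths and the choice of the `CC` variable are assumptions, not taken from the original text):

```rust,ignore
// Illustrative build.rs: only re-run when the wrapped C sources or the
// C compiler selection change, instead of on any change in the package.
fn main() {
    println!("cargo:rerun-if-changed=src/native/hello.c");
    println!("cargo:rerun-if-changed=src/native/hello.h");
    println!("cargo:rerun-if-env-changed=CC");
    // ... build src/native/hello.c here, e.g. with the `cc` crate ...
}
```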
- - -### The `links` Manifest Key - -The `package.links` key may be set in the `Cargo.toml` manifest to declare -that the package links with the given native library. The purpose of this -manifest key is to give Cargo an understanding about the set of native -dependencies that a package has, as well as providing a principled system of -passing metadata between package build scripts. - -```toml -[package] -# ... -links = "foo" -``` - -This manifest states that the package links to the `libfoo` native library. -When using the `links` key, the package must have a build script, and the -build script should use the [`rustc-link-lib` instruction](#rustc-link-lib) to -link the library. - -Primarily, Cargo requires that there is at most one package per `links` value. -In other words, it is forbidden to have two packages link to the same native -library. This helps prevent duplicate symbols between crates. Note, however, -that there are [conventions in place](#-sys-packages) to alleviate this. - -As mentioned above in the output format, each build script can generate an -arbitrary set of metadata in the form of key-value pairs. This metadata is -passed to the build scripts of **dependent** packages. For example, if the -package `bar` depends on `foo`, then if `foo` generates `key=value` as part of -its build script metadata, then the build script of `bar` will have the -environment variables `DEP_FOO_KEY=value`. See the ["Using another `sys` -crate"][using-another-sys] for an example of -how this can be used. - -Note that metadata is only passed to immediate dependents, not transitive -dependents. - -[using-another-sys]: build-script-examples.md#using-another-sys-crate - -### `*-sys` Packages - -Some Cargo packages that link to system libraries have a naming convention of -having a `-sys` suffix. Any package named `foo-sys` should provide two major -pieces of functionality: - -* The library crate should link to the native library `libfoo`. This will often - probe the current system for `libfoo` before resorting to building from - source. -* The library crate should provide **declarations** for types and functions in - `libfoo`, but **not** higher-level abstractions. - -The set of `*-sys` packages provides a common set of dependencies for linking -to native libraries. There are a number of benefits earned from having this -convention of native-library-related packages: - -* Common dependencies on `foo-sys` alleviates the rule about one package per - value of `links`. -* Other `-sys` packages can take advantage of the `DEP_NAME_KEY=value` - environment variables to better integrate with other packages. See the - ["Using another `sys` crate"][using-another-sys] example. -* A common dependency allows centralizing logic on discovering `libfoo` itself - (or building it from source). -* These dependencies are easily [overridable](#overriding-build-scripts). - -It is common to have a companion package without the `-sys` suffix that -provides a safe, high-level abstractions on top of the sys package. For -example, the [`git2` crate] provides a high-level interface to the -[`libgit2-sys` crate]. - -[`git2` crate]: https://crates.io/crates/git2 -[`libgit2-sys` crate]: https://crates.io/crates/libgit2-sys - -### Overriding Build Scripts - -If a manifest contains a `links` key, then Cargo supports overriding the build -script specified with a custom library. The purpose of this functionality is to -prevent running the build script in question altogether and instead supply the -metadata ahead of time. 
- -To override a build script, place the following configuration in any acceptable -Cargo [configuration location](config.md). - -```toml -[target.x86_64-unknown-linux-gnu.foo] -rustc-link-lib = ["foo"] -rustc-link-search = ["/path/to/foo"] -rustc-flags = "-L /some/path" -rustc-cfg = ['key="value"'] -rustc-env = {key = "value"} -rustc-cdylib-link-arg = ["โ€ฆ"] -metadata_key1 = "value" -metadata_key2 = "value" -``` - -With this configuration, if a package declares that it links to `foo` then the -build script will **not** be compiled or run, and the metadata specified will -be used instead. - -The `warning`, `rerun-if-changed`, and `rerun-if-env-changed` keys should not -be used and will be ignored. - -### Jobserver - -Cargo and `rustc` use the [jobserver protocol], developed for GNU make, to -coordinate concurrency across processes. It is essentially a semaphore that -controls the number of jobs running concurrently. The concurrency may be set -with the `--jobs` flag, which defaults to the number of logical CPUs. - -Each build script inherits one job slot from Cargo, and should endeavor to -only use one CPU while it runs. If the script wants to use more CPUs in -parallel, it should use the [`jobserver` crate] to coordinate with Cargo. - -As an example, the [`cc` crate] may enable the optional `parallel` feature -which will use the jobserver protocol to attempt to build multiple C files -at the same time. - -[`cc` crate]: https://crates.io/crates/cc -[`jobserver` crate]: https://crates.io/crates/jobserver -[jobserver protocol]: http://make.mad-scientist.net/papers/jobserver-implementation/ -[crates.io]: https://crates.io/ diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/cargo-targets.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/cargo-targets.md deleted file mode 100644 index 04ca854ef..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/cargo-targets.md +++ /dev/null @@ -1,383 +0,0 @@ -## Cargo Targets - -Cargo packages consist of *targets* which correspond to source files which can -be compiled into a crate. Packages can have [library](#library), -[binary](#binaries), [example](#examples), [test](#tests), and -[benchmark](#benchmarks) targets. The list of targets can be configured in the -`Cargo.toml` manifest, often [inferred automatically](#target-auto-discovery) -by the [directory layout][package layout] of the source files. - -See [Configuring a target](#configuring-a-target) below for details on -configuring the settings for a target. - -### Library - -The library target defines a "library" that can be used and linked by other -libraries and executables. The filename defaults to `src/lib.rs`, and the name -of the library defaults to the name of the package. A package can have only -one library. The settings for the library can be [customized] in the `[lib]` -table in `Cargo.toml`. - -```toml -# Example of customizing the library in Cargo.toml. -[lib] -crate-type = ["cdylib"] -bench = false -``` - -### Binaries - -Binary targets are executable programs that can be run after being compiled. -The default binary filename is `src/main.rs`, which defaults to the name of -the package. Additional binaries are stored in the [`src/bin/` -directory][package layout]. The settings for each binary can be [customized] -in the `[[bin]]` tables in `Cargo.toml`. - -Binaries can use the public API of the package's library. They are also linked -with the [`[dependencies]`][dependencies] defined in `Cargo.toml`. 
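For instance (an illustrative sketch rather than part of the original text; the package name `my_package` and the `greeting` function are made up), a binary at `src/bin/cool-tool.rs` consumes the library exactly as an external crate would:

```rust,ignore
// src/bin/cool-tool.rs (hypothetical): only the library's public API is visible here.
use my_package::greeting;

fn main() {
    println!("{}", greeting("world"));
}
```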
- -You can run individual binaries with the [`cargo run`] command with the `--bin -` option. [`cargo install`] can be used to copy the executable to a -common location. - -```toml -# Example of customizing binaries in Cargo.toml. -[[bin]] -name = "cool-tool" -test = false -bench = false - -[[bin]] -name = "frobnicator" -required-features = ["frobnicate"] -``` - -### Examples - -Files located under the [`examples` directory][package layout] are example -uses of the functionality provided by the library. When compiled, they are -placed in the [`target/debug/examples` directory][build cache]. - -Examples can use the public API of the package's library. They are also linked -with the [`[dependencies]`][dependencies] and -[`[dev-dependencies]`][dev-dependencies] defined in `Cargo.toml`. - -By default, examples are executable binaries (with a `main()` function). You -can specify the [`crate-type` field](#the-crate-type-field) to make an example -be compiled as a library: - -```toml -[[example]] -name = "foo" -crate-type = ["staticlib"] -``` - -You can run individual executable examples with the [`cargo run`] command with -the `--example ` option. Library examples can be built with -[`cargo build`] with the `--example ` option. [`cargo install`] -with the `--example ` option can be used to copy executable -binaries to a common location. Examples are compiled by [`cargo test`] by -default to protect them from bit-rotting. Set [the `test` -field](#the-test-field) to `true` if you have `#[test]` functions in the -example that you want to run with [`cargo test`]. - -### Tests - -There are two styles of tests within a Cargo project: - -* *Unit tests* which are functions marked with the [`#[test]` - attribute][test-attribute] located within your library or binaries (or any - target enabled with [the `test` field](#the-test-field)). These tests have - access to private APIs located within the target they are defined in. -* *Integration tests* which is a separate executable binary, also containing - `#[test]` functions, which is linked with the project's library and has - access to its *public* API. - -Tests are run with the [`cargo test`] command. By default, Cargo and `rustc` -use the [libtest harness] which is responsible for collecting functions -annotated with the [`#[test]` attribute][test-attribute] and executing them in -parallel, reporting the success and failure of each test. See [the `harness` -field](#the-harness-field) if you want to use a different harness or test -strategy. - -[libtest harness]: ../../rustc/tests/index.html - -#### Integration tests - -Files located under the [`tests` directory][package layout] are integration -tests. When you run [`cargo test`], Cargo will compile each of these files as -a separate crate, and execute them. - -Integration tests can use the public API of the package's library. They are -also linked with the [`[dependencies]`][dependencies] and -[`[dev-dependencies]`][dev-dependencies] defined in `Cargo.toml`. - -If you want to share code among multiple integration tests, you can place it -in a separate module such as `tests/common/mod.rs` and then put `mod common;` -in each test to import it. - -Each integration test results in a separate executable binary, and [`cargo -test`] will run them serially. In some cases this can be inefficient, as it -can take longer to compile, and may not make full use of multiple CPUs when -running the tests. 
If you have a lot of integration tests, you may want to -consider creating a single integration test, and split the tests into multiple -modules. The libtest harness will automatically find all of the `#[test]` -annotated functions and run them in parallel. You can pass module names to -[`cargo test`] to only run the tests within that module. - -Binary targets are automatically built if there is an integration test. This -allows an integration test to execute the binary to exercise and test its -behavior. The `CARGO_BIN_EXE_` [environment variable] is set when the -integration test is built so that it can use the [`env` macro] to locate the -executable. - -[environment variable]: environment-variables.md#environment-variables-cargo-sets-for-crates -[`env` macro]: ../../std/macro.env.html - -### Benchmarks - -Benchmarks provide a way to test the performance of your code using the -[`cargo bench`] command. They follow the same structure as [tests](#tests), -with each benchmark function annotated with the `#[bench]` attribute. -Similarly to tests: - -* Benchmarks are placed in the [`benches` directory][package layout]. -* Benchmark functions defined in libraries and binaries have access to the - *private* API within the target they are defined in. Benchmarks in the - `benches` directory may use the *public* API. -* [The `bench` field](#the-bench-field) can be used to define which targets - are benchmarked by default. -* [The `harness` field](#the-harness-field) can be used to disable the - built-in harness. - -> **Note**: The [`#[bench]` -> attribute](../../unstable-book/library-features/test.html) is currently -> unstable and only available on the [nightly channel]. There are some -> packages available on [crates.io](https://crates.io/keywords/benchmark) that -> may help with running benchmarks on the stable channel, such as -> [Criterion](https://crates.io/crates/criterion). - -### Configuring a target - -All of the `[lib]`, `[[bin]]`, `[[example]]`, `[[test]]`, and `[[bench]]` -sections in `Cargo.toml` support similar configuration for specifying how a -target should be built. The double-bracket sections like `[[bin]]` are -[array-of-table of TOML](https://toml.io/en/v1.0.0-rc.3#array-of-tables), -which means you can write more than one `[[bin]]` section to make several -executables in your crate. You can only specify one library, so `[lib]` is a -normal TOML table. - -The following is an overview of the TOML settings for each target, with each -field described in detail below. - -```toml -[lib] -name = "foo" # The name of the target. -path = "src/lib.rs" # The source file of the target. -test = true # Is tested by default. -doctest = true # Documentation examples are tested by default. -bench = true # Is benchmarked by default. -doc = true # Is documented by default. -plugin = false # Used as a compiler plugin (deprecated). -proc-macro = false # Set to `true` for a proc-macro library. -harness = true # Use libtest harness. -edition = "2015" # The edition of the target. -crate-type = ["lib"] # The crate types to generate. -required-features = [] # Features required to build this target (N/A for lib). -``` - -#### The `name` field - -The `name` field specifies the name of the target, which corresponds to the -filename of the artifact that will be generated. For a library, this is the -crate name that dependencies will use to reference it. - -For the `[lib]` and the default binary (`src/main.rs`), this defaults to the -name of the package, with any dashes replaced with underscores. 
For other -[auto discovered](#target-auto-discovery) targets, it defaults to the -directory or file name. - -This is required for all targets except `[lib]`. - -#### The `path` field - -The `path` field specifies where the source for the crate is located, relative -to the `Cargo.toml` file. - -If not specified, the [inferred path](#target-auto-discovery) is used based on -the target name. - -#### The `test` field - -The `test` field indicates whether or not the target is tested by default by -[`cargo test`]. The default is `true` for lib, bins, and tests. - -> **Note**: Examples are built by [`cargo test`] by default to ensure they -> continue to compile, but they are not *tested* by default. Setting `test = -> true` for an example will also build it as a test and run any -> [`#[test]`][test-attribute] functions defined in the example. - -#### The `doctest` field - -The `doctest` field indicates whether or not [documentation examples] are -tested by default by [`cargo test`]. This is only relevant for libraries, it -has no effect on other sections. The default is `true` for the library. - -#### The `bench` field - -The `bench` field indicates whether or not the target is benchmarked by -default by [`cargo bench`]. The default is `true` for lib, bins, and -benchmarks. - -#### The `doc` field - -The `doc` field indicates whether or not the target is included in the -documentation generated by [`cargo doc`] by default. The default is `true` for -libraries and binaries. - -> **Note**: The binary will be skipped if its name is the same as the lib -> target. - -#### The `plugin` field - -This field is used for `rustc` plugins, which are being deprecated. - -#### The `proc-macro` field - -The `proc-macro` field indicates that the library is a [procedural macro] -([reference][proc-macro-reference]). This is only valid for the `[lib]` -target. - -#### The `harness` field - -The `harness` field indicates that the [`--test` flag] will be passed to -`rustc` which will automatically include the libtest library which is the -driver for collecting and running tests marked with the [`#[test]` -attribute][test-attribute] or benchmarks with the `#[bench]` attribute. The -default is `true` for all targets. - -If set to `false`, then you are responsible for defining a `main()` function -to run tests and benchmarks. - -Tests have the [`cfg(test)` conditional expression][cfg-test] enabled whether -or not the harness is enabled. - -#### The `edition` field - -The `edition` field defines the [Rust edition] the target will use. If not -specified, it defaults to the [`edition` field][package-edition] for the -`[package]`. This field should usually not be set, and is only intended for -advanced scenarios such as incrementally transitioning a large package to a -new edition. - -#### The `crate-type` field - -The `crate-type` field defines the [crate types] that will be generated by the -target. It is an array of strings, allowing you to specify multiple crate -types for a single target. This can only be specified for libraries and -examples. Binaries, tests, and benchmarks are always the "bin" crate type. The -defaults are: - -Target | Crate Type --------|----------- -Normal library | `"lib"` -Proc-macro library | `"proc-macro"` -Example | `"bin"` - -The available options are `bin`, `lib`, `rlib`, `dylib`, `cdylib`, -`staticlib`, and `proc-macro`. You can read more about the different crate -types in the [Rust Reference Manual][crate types]. 
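As a brief illustration (not from the original page; the name `foo` is invented), a library intended to be consumed both by other Rust crates and over a C ABI could request several crate types at once:

```toml
[lib]
name = "foo"                      # hypothetical library name
crate-type = ["rlib", "cdylib"]   # a Rust library plus a C-compatible dynamic library
```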
- -#### The `required-features` field - -The `required-features` field specifies which [features] the target needs in -order to be built. If any of the required features are not enabled, the -target will be skipped. This is only relevant for the `[[bin]]`, `[[bench]]`, -`[[test]]`, and `[[example]]` sections, it has no effect on `[lib]`. - -```toml -[features] -# ... -postgres = [] -sqlite = [] -tools = [] - -[[bin]] -name = "my-pg-tool" -required-features = ["postgres", "tools"] -``` - - -### Target auto-discovery - -By default, Cargo automatically determines the targets to build based on the -[layout of the files][package layout] on the filesystem. The target -configuration tables, such as `[lib]`, `[[bin]]`, `[[test]]`, `[[bench]]`, or -`[[example]]`, can be used to add additional targets that don't follow the -standard directory layout. - -The automatic target discovery can be disabled so that only manually -configured targets will be built. Setting the keys `autobins`, `autoexamples`, -`autotests`, or `autobenches` to `false` in the `[package]` section will -disable auto-discovery of the corresponding target type. - -```toml -[package] -# ... -autobins = false -autoexamples = false -autotests = false -autobenches = false -``` - -Disabling automatic discovery should only be needed for specialized -situations. For example, if you have a library where you want a *module* named -`bin`, this would present a problem because Cargo would usually attempt to -compile anything in the `bin` directory as an executable. Here is a sample -layout of this scenario: - -```text -โ”œโ”€โ”€ Cargo.toml -โ””โ”€โ”€ src - ย ย  โ”œโ”€โ”€ lib.rs - ย ย  โ””โ”€โ”€ bin - ย ย  ย ย  โ””โ”€โ”€ mod.rs -``` - -To prevent Cargo from inferring `src/bin/mod.rs` as an executable, set -`autobins = false` in `Cargo.toml` to disable auto-discovery: - -```toml -[package] -# โ€ฆ -autobins = false -``` - -> **Note**: For packages with the 2015 edition, the default for auto-discovery -> is `false` if at least one target is manually defined in `Cargo.toml`. -> Beginning with the 2018 edition, the default is always `true`. 
- - -[Build cache]: ../guide/build-cache.md -[Rust Edition]: ../../edition-guide/index.html -[`--test` flag]: ../../rustc/command-line-arguments.html#option-test -[`cargo bench`]: ../commands/cargo-bench.md -[`cargo build`]: ../commands/cargo-build.md -[`cargo doc`]: ../commands/cargo-doc.md -[`cargo install`]: ../commands/cargo-install.md -[`cargo run`]: ../commands/cargo-run.md -[`cargo test`]: ../commands/cargo-test.md -[cfg-test]: ../../reference/conditional-compilation.html#test -[crate types]: ../../reference/linkage.html -[crates.io]: https://crates.io/ -[customized]: #configuring-a-target -[dependencies]: specifying-dependencies.md -[dev-dependencies]: specifying-dependencies.md#development-dependencies -[documentation examples]: ../../rustdoc/documentation-tests.html -[features]: features.md -[nightly channel]: ../../book/appendix-07-nightly-rust.html -[package layout]: ../guide/project-layout.md -[package-edition]: manifest.md#the-edition-field -[proc-macro-reference]: ../../reference/procedural-macros.html -[procedural macro]: ../../book/ch19-06-macros.html -[test-attribute]: ../../reference/attributes/testing.html#the-test-attribute diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/config.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/config.md deleted file mode 100644 index b363f5d58..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/config.md +++ /dev/null @@ -1,1088 +0,0 @@ -## Configuration - -This document explains how Cargoโ€™s configuration system works, as well as -available keys or configuration. For configuration of a package through its -manifest, see the [manifest format](manifest.md). - -### Hierarchical structure - -Cargo allows local configuration for a particular package as well as global -configuration. It looks for configuration files in the current directory and -all parent directories. If, for example, Cargo were invoked in -`/projects/foo/bar/baz`, then the following configuration files would be -probed for and unified in this order: - -* `/projects/foo/bar/baz/.cargo/config.toml` -* `/projects/foo/bar/.cargo/config.toml` -* `/projects/foo/.cargo/config.toml` -* `/projects/.cargo/config.toml` -* `/.cargo/config.toml` -* `$CARGO_HOME/config.toml` which defaults to: - * Windows: `%USERPROFILE%\.cargo\config.toml` - * Unix: `$HOME/.cargo/config.toml` - -With this structure, you can specify configuration per-package, and even -possibly check it into version control. You can also specify personal defaults -with a configuration file in your home directory. - -If a key is specified in multiple config files, the values will get merged -together. Numbers, strings, and booleans will use the value in the deeper -config directory taking precedence over ancestor directories, where the -home directory is the lowest priority. Arrays will be joined together. - -At present, when being invoked from a workspace, Cargo does not read config -files from crates within the workspace. i.e. if a workspace has two crates in -it, named `/projects/foo/bar/baz/mylib` and `/projects/foo/bar/baz/mybin`, and -there are Cargo configs at `/projects/foo/bar/baz/mylib/.cargo/config.toml` -and `/projects/foo/bar/baz/mybin/.cargo/config.toml`, Cargo does not read -those configuration files if it is invoked from the workspace root -(`/projects/foo/bar/baz/`). - -> **Note:** Cargo also reads config files without the `.toml` extension, such as -> `.cargo/config`. 
Support for the `.toml` extension was added in version 1.39 -> and is the preferred form. If both files exist, Cargo will use the file -> without the extension. - -### Configuration format - -Configuration files are written in the [TOML format][toml] (like the -manifest), with simple key-value pairs inside of sections (tables). The -following is a quick overview of all settings, with detailed descriptions -found below. - -```toml -paths = ["/path/to/override"] # path dependency overrides - -[alias] # command aliases -b = "build" -c = "check" -t = "test" -r = "run" -rr = "run --release" -space_example = ["run", "--release", "--", "\"command list\""] - -[build] -jobs = 1 # number of parallel jobs, defaults to # of CPUs -rustc = "rustc" # the rust compiler tool -rustc-wrapper = "โ€ฆ" # run this wrapper instead of `rustc` -rustc-workspace-wrapper = "โ€ฆ" # run this wrapper instead of `rustc` for workspace members -rustdoc = "rustdoc" # the doc generator tool -target = "triple" # build for the target triple (ignored by `cargo install`) -target-dir = "target" # path of where to place all generated artifacts -rustflags = ["โ€ฆ", "โ€ฆ"] # custom flags to pass to all compiler invocations -rustdocflags = ["โ€ฆ", "โ€ฆ"] # custom flags to pass to rustdoc -incremental = true # whether or not to enable incremental compilation -dep-info-basedir = "โ€ฆ" # path for the base directory for targets in depfiles -pipelining = true # rustc pipelining - -[doc] -browser = "chromium" # browser to use with `cargo doc --open`, - # overrides the `BROWSER` environment variable - -[env] -# Set ENV_VAR_NAME=value for any process run by Cargo -ENV_VAR_NAME = "value" -# Set even if already present in environment -ENV_VAR_NAME_2 = { value = "value", force = true } -# Value is relative to .cargo directory containing `config.toml`, make absolute -ENV_VAR_NAME_3 = { value = "relative/path", relative = true } - -[future-incompat-report] -frequency = 'always' # when to display a notification about a future incompat report - -[cargo-new] -vcs = "none" # VCS to use ('git', 'hg', 'pijul', 'fossil', 'none') - -[http] -debug = false # HTTP debugging -proxy = "host:port" # HTTP proxy in libcurl format -ssl-version = "tlsv1.3" # TLS version to use -ssl-version.max = "tlsv1.3" # maximum TLS version -ssl-version.min = "tlsv1.1" # minimum TLS version -timeout = 30 # timeout for each HTTP request, in seconds -low-speed-limit = 10 # network timeout threshold (bytes/sec) -cainfo = "cert.pem" # path to Certificate Authority (CA) bundle -check-revoke = true # check for SSL certificate revocation -multiplexing = true # HTTP/2 multiplexing -user-agent = "โ€ฆ" # the user-agent header - -[install] -root = "/some/path" # `cargo install` destination directory - -[net] -retry = 2 # network retries -git-fetch-with-cli = true # use the `git` executable for git operations -offline = true # do not access the network - -[patch.] -# Same keys as for [patch] in Cargo.toml - -[profile.] # Modify profile settings via config. -opt-level = 0 # Optimization level. -debug = true # Include debug info. -split-debuginfo = '...' # Debug info splitting behavior. -debug-assertions = true # Enables debug assertions. -overflow-checks = true # Enables runtime integer overflow checks. -lto = false # Sets link-time optimization. -panic = 'unwind' # The panic strategy. -incremental = true # Incremental compilation. -codegen-units = 16 # Number of code generation units. -rpath = false # Sets the rpath linking option. 
-[profile..build-override] # Overrides build-script settings. -# Same keys for a normal profile. -[profile..package.] # Override profile for a package. -# Same keys for a normal profile (minus `panic`, `lto`, and `rpath`). - -[registries.] # registries other than crates.io -index = "โ€ฆ" # URL of the registry index -token = "โ€ฆ" # authentication token for the registry - -[registry] -default = "โ€ฆ" # name of the default registry -token = "โ€ฆ" # authentication token for crates.io - -[source.] # source definition and replacement -replace-with = "โ€ฆ" # replace this source with the given named source -directory = "โ€ฆ" # path to a directory source -registry = "โ€ฆ" # URL to a registry source -local-registry = "โ€ฆ" # path to a local registry source -git = "โ€ฆ" # URL of a git repository source -branch = "โ€ฆ" # branch name for the git repository -tag = "โ€ฆ" # tag name for the git repository -rev = "โ€ฆ" # revision for the git repository - -[target.] -linker = "โ€ฆ" # linker to use -runner = "โ€ฆ" # wrapper to run executables -rustflags = ["โ€ฆ", "โ€ฆ"] # custom flags for `rustc` - -[target.] -runner = "โ€ฆ" # wrapper to run executables -rustflags = ["โ€ฆ", "โ€ฆ"] # custom flags for `rustc` - -[target..] # `links` build script override -rustc-link-lib = ["foo"] -rustc-link-search = ["/path/to/foo"] -rustc-flags = ["-L", "/some/path"] -rustc-cfg = ['key="value"'] -rustc-env = {key = "value"} -rustc-cdylib-link-arg = ["โ€ฆ"] -metadata_key1 = "value" -metadata_key2 = "value" - -[term] -quiet = false # whether cargo output is quiet -verbose = false # whether cargo provides verbose output -color = 'auto' # whether cargo colorizes output -progress.when = 'auto' # whether cargo shows progress bar -progress.width = 80 # width of progress bar -``` - -### Environment variables - -Cargo can also be configured through environment variables in addition to the -TOML configuration files. For each configuration key of the form `foo.bar` the -environment variable `CARGO_FOO_BAR` can also be used to define the value. -Keys are converted to uppercase, dots and dashes are converted to underscores. -For example the `target.x86_64-unknown-linux-gnu.runner` key can also be -defined by the `CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER` environment -variable. - -Environment variables will take precedence over TOML configuration files. -Currently only integer, boolean, string and some array values are supported to -be defined by environment variables. Descriptions below indicate which keys -support environment variables. - -In addition to the system above, Cargo recognizes a few other specific -[environment variables][env]. - -### Config-relative paths - -Paths in config files may be absolute, relative, or a bare name without any -path separators. Paths for executables without a path separator will use the -`PATH` environment variable to search for the executable. Paths for -non-executables will be relative to where the config value is defined. For -config files, that is relative to the parent directory of the `.cargo` -directory where the value was defined. For environment variables it is -relative to the current working directory. - -```toml -# Relative path examples. - -[target.x86_64-unknown-linux-gnu] -runner = "foo" # Searches `PATH` for `foo`. - -[source.vendored-sources] -# Directory is relative to the parent where `.cargo/config.toml` is located. -# For example, `/my/project/.cargo/config.toml` would result in `/my/project/vendor`. 
-directory = "vendor" -``` - -### Executable paths with arguments - -Some Cargo commands invoke external programs, which can be configured as a path -and some number of arguments. - -The value may be an array of strings like `['/path/to/program', 'somearg']` or -a space-separated string like `'/path/to/program somearg'`. If the path to the -executable contains a space, the list form must be used. - -If Cargo is passing other arguments to the program such as a path to open or -run, they will be passed after the last specified argument in the value of an -option of this format. If the specified program does not have path separators, -Cargo will search `PATH` for its executable. - -### Credentials - -Configuration values with sensitive information are stored in the -`$CARGO_HOME/credentials.toml` file. This file is automatically created and updated -by [`cargo login`]. It follows the same format as Cargo config files. - -```toml -[registry] -token = "โ€ฆ" # Access token for crates.io - -[registries.] -token = "โ€ฆ" # Access token for the named registry -``` - -Tokens are used by some Cargo commands such as [`cargo publish`] for -authenticating with remote registries. Care should be taken to protect the -tokens and to keep them secret. - -As with most other config values, tokens may be specified with environment -variables. The token for [crates.io] may be specified with the -`CARGO_REGISTRY_TOKEN` environment variable. Tokens for other registries may -be specified with environment variables of the form -`CARGO_REGISTRIES__TOKEN` where `` is the name of the registry in -all capital letters. - -### Configuration keys - -This section documents all configuration keys. The description for keys with -variable parts are annotated with angled brackets like `target.` where -the `` part can be any target triple like -`target.x86_64-pc-windows-msvc`. - -#### `paths` -* Type: array of strings (paths) -* Default: none -* Environment: not supported - -An array of paths to local packages which are to be used as overrides for -dependencies. For more information see the [Overriding Dependencies -guide](overriding-dependencies.md#paths-overrides). - -#### `[alias]` -* Type: string or array of strings -* Default: see below -* Environment: `CARGO_ALIAS_` - -The `[alias]` table defines CLI command aliases. For example, running `cargo -b` is an alias for running `cargo build`. Each key in the table is the -subcommand, and the value is the actual command to run. The value may be an -array of strings, where the first element is the command and the following are -arguments. It may also be a string, which will be split on spaces into -subcommand and arguments. The following aliases are built-in to Cargo: - -```toml -[alias] -b = "build" -c = "check" -d = "doc" -t = "test" -r = "run" -``` - -Aliases are not allowed to redefine existing built-in commands. - -#### `[build]` - -The `[build]` table controls build-time operations and compiler settings. - -##### `build.jobs` -* Type: integer -* Default: number of logical CPUs -* Environment: `CARGO_BUILD_JOBS` - -Sets the maximum number of compiler processes to run in parallel. - -Can be overridden with the `--jobs` CLI option. - -##### `build.rustc` -* Type: string (program path) -* Default: "rustc" -* Environment: `CARGO_BUILD_RUSTC` or `RUSTC` - -Sets the executable to use for `rustc`. 
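As a small illustration (not part of the original page; the path and the job count are invented), pinning the compiler and capping parallelism in a config file could look like this:

```toml
[build]
jobs = 4                        # at most four parallel compiler processes
rustc = "/opt/rust/bin/rustc"   # hypothetical path to a specific rustc
```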
- -##### `build.rustc-wrapper` -* Type: string (program path) -* Default: none -* Environment: `CARGO_BUILD_RUSTC_WRAPPER` or `RUSTC_WRAPPER` - -Sets a wrapper to execute instead of `rustc`. The first argument passed to the -wrapper is the path to the actual `rustc`. - -##### `build.rustc-workspace-wrapper` -* Type: string (program path) -* Default: none -* Environment: `CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER` or `RUSTC_WORKSPACE_WRAPPER` - -Sets a wrapper to execute instead of `rustc`, for workspace members only. -The first argument passed to the wrapper is the path to the actual `rustc`. -It affects the filename hash so that artifacts produced by the wrapper are cached separately. - -##### `build.rustdoc` -* Type: string (program path) -* Default: "rustdoc" -* Environment: `CARGO_BUILD_RUSTDOC` or `RUSTDOC` - -Sets the executable to use for `rustdoc`. - -##### `build.target` -* Type: string -* Default: host platform -* Environment: `CARGO_BUILD_TARGET` - -The default target platform triple to compile to. - -This may also be a relative path to a `.json` target spec file. - -Can be overridden with the `--target` CLI option. - -##### `build.target-dir` -* Type: string (path) -* Default: "target" -* Environment: `CARGO_BUILD_TARGET_DIR` or `CARGO_TARGET_DIR` - -The path to where all compiler output is placed. The default if not specified -is a directory named `target` located at the root of the workspace. - -Can be overridden with the `--target-dir` CLI option. - -##### `build.rustflags` -* Type: string or array of strings -* Default: none -* Environment: `CARGO_BUILD_RUSTFLAGS` or `RUSTFLAGS` - -Extra command-line flags to pass to `rustc`. The value may be a array of -strings or a space-separated string. - -There are three mutually exclusive sources of extra flags. They are checked in -order, with the first one being used: - -1. `RUSTFLAGS` environment variable. -2. All matching `target..rustflags` and `target..rustflags` - config entries joined together. -3. `build.rustflags` config value. - -Additional flags may also be passed with the [`cargo rustc`] command. - -If the `--target` flag (or [`build.target`](#buildtarget)) is used, then the -flags will only be passed to the compiler for the target. Things being built -for the host, such as build scripts or proc macros, will not receive the args. -Without `--target`, the flags will be passed to all compiler invocations -(including build scripts and proc macros) because dependencies are shared. If -you have args that you do not want to pass to build scripts or proc macros and -are building for the host, pass `--target` with the host triple. - -It is not recommended to pass in flags that Cargo itself usually manages. For -example, the flags driven by [profiles] are best handled by setting the -appropriate profile setting. - -> **Caution**: Due to the low-level nature of passing flags directly to the -> compiler, this may cause a conflict with future versions of Cargo which may -> issue the same or similar flags on its own which may interfere with the -> flags you specify. This is an area where Cargo may not always be backwards -> compatible. - -##### `build.rustdocflags` -* Type: string or array of strings -* Default: none -* Environment: `CARGO_BUILD_RUSTDOCFLAGS` or `RUSTDOCFLAGS` - -Extra command-line flags to pass to `rustdoc`. The value may be a array of -strings or a space-separated string. - -There are two mutually exclusive sources of extra flags. They are checked in -order, with the first one being used: - -1. 
`RUSTDOCFLAGS` environment variable. -2. `build.rustdocflags` config value. - -Additional flags may also be passed with the [`cargo rustdoc`] command. - -##### `build.incremental` -* Type: bool -* Default: from profile -* Environment: `CARGO_BUILD_INCREMENTAL` or `CARGO_INCREMENTAL` - -Whether or not to perform [incremental compilation]. The default if not set is -to use the value from the [profile]. Otherwise this overrides the setting of -all profiles. - -The `CARGO_INCREMENTAL` environment variable can be set to `1` to force enable -incremental compilation for all profiles, or `0` to disable it. This env var -overrides the config setting. - -##### `build.dep-info-basedir` -* Type: string (path) -* Default: none -* Environment: `CARGO_BUILD_DEP_INFO_BASEDIR` - -Strips the given path prefix from [dep -info](../guide/build-cache.md#dep-info-files) file paths. This config setting -is intended to convert absolute paths to relative paths for tools that require -relative paths. - -The setting itself is a config-relative path. So, for example, a value of -`"."` would strip all paths starting with the parent directory of the `.cargo` -directory. - -##### `build.pipelining` -* Type: boolean -* Default: true -* Environment: `CARGO_BUILD_PIPELINING` - -Controls whether or not build pipelining is used. This allows Cargo to -schedule overlapping invocations of `rustc` in parallel when possible. - -#### `[doc]` - -The `[doc]` table defines options for the [`cargo doc`] command. - -##### `doc.browser` - -* Type: string or array of strings ([program path and args]) -* Default: `BROWSER` environment variable, or, if that is missing, - opening the link in a system specific way - -This option sets the browser to be used by [`cargo doc`], overriding the -`BROWSER` environment variable when opening documentation with the `--open` -option. - -#### `[cargo-new]` - -The `[cargo-new]` table defines defaults for the [`cargo new`] command. - -##### `cargo-new.name` - -This option is deprecated and unused. - -##### `cargo-new.email` - -This option is deprecated and unused. - -##### `cargo-new.vcs` -* Type: string -* Default: "git" or "none" -* Environment: `CARGO_CARGO_NEW_VCS` - -Specifies the source control system to use for initializing a new repository. -Valid values are `git`, `hg` (for Mercurial), `pijul`, `fossil` or `none` to -disable this behavior. Defaults to `git`, or `none` if already inside a VCS -repository. Can be overridden with the `--vcs` CLI option. - -### `[env]` - -The `[env]` section allows you to set additional environment variables for -build scripts, rustc invocations, `cargo run` and `cargo build`. - -```toml -[env] -OPENSSL_DIR = "/opt/openssl" -``` - -By default, the variables specified will not override values that already exist -in the environment. This behavior can be changed by setting the `force` flag. - -Setting the `relative` flag evaluates the value as a config-relative path that -is relative to the parent directory of the `.cargo` directory that contains the -`config.toml` file. The value of the environment variable will be the full -absolute path. 
- -```toml -[env] -TMPDIR = { value = "/home/tmp", force = true } -OPENSSL_DIR = { value = "vendor/openssl", relative = true } -``` - -### `[future-incompat-report]` - -The `[future-incompat-report]` table controls setting for [future incompat reporting](future-incompat-report.md) - -#### `future-incompat-report.frequency` -* Type: string -* Default: "always" -* Environment: `CARGO_FUTURE_INCOMPAT_REPORT_FREQUENCY` - -Controls how often we display a notification to the terminal when a future incompat report is available. Possible values: - -* `always` (default): Always display a notification when a command (e.g. `cargo build`) produces a future incompat report -* `never`: Never display a notification - -#### `[http]` - -The `[http]` table defines settings for HTTP behavior. This includes fetching -crate dependencies and accessing remote git repositories. - -##### `http.debug` -* Type: boolean -* Default: false -* Environment: `CARGO_HTTP_DEBUG` - -If `true`, enables debugging of HTTP requests. The debug information can be -seen by setting the `CARGO_LOG=cargo::ops::registry=debug` environment -variable (or use `trace` for even more information). - -Be wary when posting logs from this output in a public location. The output -may include headers with authentication tokens which you don't want to leak! -Be sure to review logs before posting them. - -##### `http.proxy` -* Type: string -* Default: none -* Environment: `CARGO_HTTP_PROXY` or `HTTPS_PROXY` or `https_proxy` or `http_proxy` - -Sets an HTTP and HTTPS proxy to use. The format is in [libcurl format] as in -`[protocol://]host[:port]`. If not set, Cargo will also check the `http.proxy` -setting in your global git configuration. If none of those are set, the -`HTTPS_PROXY` or `https_proxy` environment variables set the proxy for HTTPS -requests, and `http_proxy` sets it for HTTP requests. - -##### `http.timeout` -* Type: integer -* Default: 30 -* Environment: `CARGO_HTTP_TIMEOUT` or `HTTP_TIMEOUT` - -Sets the timeout for each HTTP request, in seconds. - -##### `http.cainfo` -* Type: string (path) -* Default: none -* Environment: `CARGO_HTTP_CAINFO` - -Path to a Certificate Authority (CA) bundle file, used to verify TLS -certificates. If not specified, Cargo attempts to use the system certificates. - -##### `http.check-revoke` -* Type: boolean -* Default: true (Windows) false (all others) -* Environment: `CARGO_HTTP_CHECK_REVOKE` - -This determines whether or not TLS certificate revocation checks should be -performed. This only works on Windows. - -##### `http.ssl-version` -* Type: string or min/max table -* Default: none -* Environment: `CARGO_HTTP_SSL_VERSION` - -This sets the minimum TLS version to use. It takes a string, with one of the -possible values of "default", "tlsv1", "tlsv1.0", "tlsv1.1", "tlsv1.2", or -"tlsv1.3". - -This may alternatively take a table with two keys, `min` and `max`, which each -take a string value of the same kind that specifies the minimum and maximum -range of TLS versions to use. - -The default is a minimum version of "tlsv1.0" and a max of the newest version -supported on your platform, typically "tlsv1.3". - -##### `http.low-speed-limit` -* Type: integer -* Default: 10 -* Environment: `CARGO_HTTP_LOW_SPEED_LIMIT` - -This setting controls timeout behavior for slow connections. If the average -transfer speed in bytes per second is below the given value for -[`http.timeout`](#httptimeout) seconds (default 30 seconds), then the -connection is considered too slow and Cargo will abort and retry. 
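For example (an illustrative sketch, not from the original page), the two settings combine as follows: with the values below, a transfer averaging less than 5 bytes per second for 60 seconds is aborted and retried.

```toml
[http]
timeout = 60          # seconds the slow-transfer condition must persist
low-speed-limit = 5   # minimum average transfer speed, in bytes per second
```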
- -##### `http.multiplexing` -* Type: boolean -* Default: true -* Environment: `CARGO_HTTP_MULTIPLEXING` - -When `true`, Cargo will attempt to use the HTTP2 protocol with multiplexing. -This allows multiple requests to use the same connection, usually improving -performance when fetching multiple files. If `false`, Cargo will use HTTP 1.1 -without pipelining. - -##### `http.user-agent` -* Type: string -* Default: Cargo's version -* Environment: `CARGO_HTTP_USER_AGENT` - -Specifies a custom user-agent header to use. The default if not specified is a -string that includes Cargo's version. - -#### `[install]` - -The `[install]` table defines defaults for the [`cargo install`] command. - -##### `install.root` -* Type: string (path) -* Default: Cargo's home directory -* Environment: `CARGO_INSTALL_ROOT` - -Sets the path to the root directory for installing executables for [`cargo -install`]. Executables go into a `bin` directory underneath the root. - -To track information of installed executables, some extra files, such as -`.crates.toml` and `.crates2.json`, are also created under this root. - -The default if not specified is Cargo's home directory (default `.cargo` in -your home directory). - -Can be overridden with the `--root` command-line option. - -#### `[net]` - -The `[net]` table controls networking configuration. - -##### `net.retry` -* Type: integer -* Default: 2 -* Environment: `CARGO_NET_RETRY` - -Number of times to retry possibly spurious network errors. - -##### `net.git-fetch-with-cli` -* Type: boolean -* Default: false -* Environment: `CARGO_NET_GIT_FETCH_WITH_CLI` - -If this is `true`, then Cargo will use the `git` executable to fetch registry -indexes and git dependencies. If `false`, then it uses a built-in `git` -library. - -Setting this to `true` can be helpful if you have special authentication -requirements that Cargo does not support. See [Git -Authentication](../appendix/git-authentication.md) for more information about -setting up git authentication. - -##### `net.offline` -* Type: boolean -* Default: false -* Environment: `CARGO_NET_OFFLINE` - -If this is `true`, then Cargo will avoid accessing the network, and attempt to -proceed with locally cached data. If `false`, Cargo will access the network as -needed, and generate an error if it encounters a network error. - -Can be overridden with the `--offline` command-line option. - -#### `[patch]` - -Just as you can override dependencies using [`[patch]` in -`Cargo.toml`](overriding-dependencies.md#the-patch-section), you can -override them in the cargo configuration file to apply those patches to -any affected build. The format is identical to the one used in -`Cargo.toml`. - -Since `.cargo/config.toml` files are not usually checked into source -control, you should prefer patching using `Cargo.toml` where possible to -ensure that other developers can compile your crate in their own -environments. Patching through cargo configuration files is generally -only appropriate when the patch section is automatically generated by an -external build tool. - -If a given dependency is patched both in a cargo configuration file and -a `Cargo.toml` file, the patch in the configuration file is used. If -multiple configuration files patch the same dependency, standard cargo -configuration merging is used, which prefers the value defined closest -to the current directory, with `$HOME/.cargo/config.toml` taking the -lowest precedence. 
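A minimal sketch of such an externally generated patch follows (the crate name `foo` and the path are invented for illustration):

```toml
# Hypothetical entry in a .cargo/config.toml produced by an external build tool.
[patch.crates-io]
foo = { path = "overrides/foo" }
```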
- -Relative `path` dependencies in such a `[patch]` section are resolved -relative to the configuration file they appear in. - -#### `[profile]` - -The `[profile]` table can be used to globally change profile settings, and -override settings specified in `Cargo.toml`. It has the same syntax and -options as profiles specified in `Cargo.toml`. See the [Profiles chapter] for -details about the options. - -[Profiles chapter]: profiles.md - -##### `[profile..build-override]` -* Environment: `CARGO_PROFILE__BUILD_OVERRIDE_` - -The build-override table overrides settings for build scripts, proc macros, -and their dependencies. It has the same keys as a normal profile. See the -[overrides section](profiles.md#overrides) for more details. - -##### `[profile..package.]` -* Environment: not supported - -The package table overrides settings for specific packages. It has the same -keys as a normal profile, minus the `panic`, `lto`, and `rpath` settings. See -the [overrides section](profiles.md#overrides) for more details. - -##### `profile..codegen-units` -* Type: integer -* Default: See profile docs. -* Environment: `CARGO_PROFILE__CODEGEN_UNITS` - -See [codegen-units](profiles.md#codegen-units). - -##### `profile..debug` -* Type: integer or boolean -* Default: See profile docs. -* Environment: `CARGO_PROFILE__DEBUG` - -See [debug](profiles.md#debug). - -##### `profile..split-debuginfo` -* Type: string -* Default: See profile docs. -* Environment: `CARGO_PROFILE__SPLIT_DEBUGINFO` - -See [split-debuginfo](profiles.md#split-debuginfo). - -##### `profile..debug-assertions` -* Type: boolean -* Default: See profile docs. -* Environment: `CARGO_PROFILE__DEBUG_ASSERTIONS` - -See [debug-assertions](profiles.md#debug-assertions). - -##### `profile..incremental` -* Type: boolean -* Default: See profile docs. -* Environment: `CARGO_PROFILE__INCREMENTAL` - -See [incremental](profiles.md#incremental). - -##### `profile..lto` -* Type: string or boolean -* Default: See profile docs. -* Environment: `CARGO_PROFILE__LTO` - -See [lto](profiles.md#lto). - -##### `profile..overflow-checks` -* Type: boolean -* Default: See profile docs. -* Environment: `CARGO_PROFILE__OVERFLOW_CHECKS` - -See [overflow-checks](profiles.md#overflow-checks). - -##### `profile..opt-level` -* Type: integer or string -* Default: See profile docs. -* Environment: `CARGO_PROFILE__OPT_LEVEL` - -See [opt-level](profiles.md#opt-level). - -##### `profile..panic` -* Type: string -* default: See profile docs. -* Environment: `CARGO_PROFILE__PANIC` - -See [panic](profiles.md#panic). - -##### `profile..rpath` -* Type: boolean -* default: See profile docs. -* Environment: `CARGO_PROFILE__RPATH` - -See [rpath](profiles.md#rpath). - - -#### `[registries]` - -The `[registries]` table is used for specifying additional [registries]. It -consists of a sub-table for each named registry. - -##### `registries..index` -* Type: string (url) -* Default: none -* Environment: `CARGO_REGISTRIES__INDEX` - -Specifies the URL of the git index for the registry. - -##### `registries..token` -* Type: string -* Default: none -* Environment: `CARGO_REGISTRIES__TOKEN` - -Specifies the authentication token for the given registry. This value should -only appear in the [credentials](#credentials) file. This is used for registry -commands like [`cargo publish`] that require authentication. - -Can be overridden with the `--token` command-line option. - -#### `[registry]` - -The `[registry]` table controls the default registry used when one is not -specified. 
- -##### `registry.index` - -This value is no longer accepted and should not be used. - -##### `registry.default` -* Type: string -* Default: `"crates-io"` -* Environment: `CARGO_REGISTRY_DEFAULT` - -The name of the registry (from the [`registries` table](#registries)) to use -by default for registry commands like [`cargo publish`]. - -Can be overridden with the `--registry` command-line option. - -##### `registry.token` -* Type: string -* Default: none -* Environment: `CARGO_REGISTRY_TOKEN` - -Specifies the authentication token for [crates.io]. This value should only -appear in the [credentials](#credentials) file. This is used for registry -commands like [`cargo publish`] that require authentication. - -Can be overridden with the `--token` command-line option. - -#### `[source]` - -The `[source]` table defines the registry sources available. See [Source -Replacement] for more information. It consists of a sub-table for each named -source. A source should only define one kind (directory, registry, -local-registry, or git). - -##### `source..replace-with` -* Type: string -* Default: none -* Environment: not supported - -If set, replace this source with the given named source. - -##### `source..directory` -* Type: string (path) -* Default: none -* Environment: not supported - -Sets the path to a directory to use as a directory source. - -##### `source..registry` -* Type: string (url) -* Default: none -* Environment: not supported - -Sets the URL to use for a registry source. - -##### `source..local-registry` -* Type: string (path) -* Default: none -* Environment: not supported - -Sets the path to a directory to use as a local registry source. - -##### `source..git` -* Type: string (url) -* Default: none -* Environment: not supported - -Sets the URL to use for a git repository source. - -##### `source..branch` -* Type: string -* Default: none -* Environment: not supported - -Sets the branch name to use for a git repository. - -If none of `branch`, `tag`, or `rev` is set, defaults to the `master` branch. - -##### `source..tag` -* Type: string -* Default: none -* Environment: not supported - -Sets the tag name to use for a git repository. - -If none of `branch`, `tag`, or `rev` is set, defaults to the `master` branch. - -##### `source..rev` -* Type: string -* Default: none -* Environment: not supported - -Sets the [revision] to use for a git repository. - -If none of `branch`, `tag`, or `rev` is set, defaults to the `master` branch. - - -#### `[target]` - -The `[target]` table is used for specifying settings for specific platform -targets. It consists of a sub-table which is either a platform triple or a -[`cfg()` expression]. The given values will be used if the target platform -matches either the `` value or the `` expression. - -```toml -[target.thumbv7m-none-eabi] -linker = "arm-none-eabi-gcc" -runner = "my-emulator" -rustflags = ["โ€ฆ", "โ€ฆ"] - -[target.'cfg(all(target_arch = "arm", target_os = "none"))'] -runner = "my-arm-wrapper" -rustflags = ["โ€ฆ", "โ€ฆ"] -``` - -`cfg` values come from those built-in to the compiler (run `rustc --print=cfg` -to view), values set by [build scripts], and extra `--cfg` flags passed to -`rustc` (such as those defined in `RUSTFLAGS`). Do not try to match on -`debug_assertions` or Cargo features like `feature="foo"`. - -If using a target spec JSON file, the `` value is the filename stem. -For example `--target foo/bar.json` would match `[target.bar]`. - -##### `target..ar` - -This option is deprecated and unused. 
- -##### `target..linker` -* Type: string (program path) -* Default: none -* Environment: `CARGO_TARGET__LINKER` - -Specifies the linker which is passed to `rustc` (via [`-C linker`]) when the -`` is being compiled for. By default, the linker is not overridden. - -##### `target..runner` -* Type: string or array of strings ([program path and args]) -* Default: none -* Environment: `CARGO_TARGET__RUNNER` - -If a runner is provided, executables for the target `` will be -executed by invoking the specified runner with the actual executable passed as -an argument. This applies to [`cargo run`], [`cargo test`] and [`cargo bench`] -commands. By default, compiled executables are executed directly. - -##### `target..runner` - -This is similar to the [target runner](#targettriplerunner), but using -a [`cfg()` expression]. If both a `` and `` runner match, -the `` will take precedence. It is an error if more than one -`` runner matches the current target. - -##### `target..rustflags` -* Type: string or array of strings -* Default: none -* Environment: `CARGO_TARGET__RUSTFLAGS` - -Passes a set of custom flags to the compiler for this ``. The value -may be a array of strings or a space-separated string. - -See [`build.rustflags`](#buildrustflags) for more details on the different -ways to specific extra flags. - -##### `target..rustflags` - -This is similar to the [target rustflags](#targettriplerustflags), but -using a [`cfg()` expression]. If several `` and `` entries -match the current target, the flags are joined together. - -##### `target..` - -The links sub-table provides a way to [override a build script]. When -specified, the build script for the given `links` library will not be -run, and the given values will be used instead. - -```toml -[target.x86_64-unknown-linux-gnu.foo] -rustc-link-lib = ["foo"] -rustc-link-search = ["/path/to/foo"] -rustc-flags = "-L /some/path" -rustc-cfg = ['key="value"'] -rustc-env = {key = "value"} -rustc-cdylib-link-arg = ["โ€ฆ"] -metadata_key1 = "value" -metadata_key2 = "value" -``` - -#### `[term]` - -The `[term]` table controls terminal output and interaction. - -##### `term.quiet` -* Type: boolean -* Default: false -* Environment: `CARGO_TERM_QUIET` - -Controls whether or not log messages are displayed by Cargo. - -Specifying the `--quiet` flag will override and force quiet output. -Specifying the `--verbose` flag will override and disable quiet output. - -##### `term.verbose` -* Type: boolean -* Default: false -* Environment: `CARGO_TERM_VERBOSE` - -Controls whether or not extra detailed messages are displayed by Cargo. - -Specifying the `--quiet` flag will override and disable verbose output. -Specifying the `--verbose` flag will override and force verbose output. - -##### `term.color` -* Type: string -* Default: "auto" -* Environment: `CARGO_TERM_COLOR` - -Controls whether or not colored output is used in the terminal. Possible values: - -* `auto` (default): Automatically detect if color support is available on the - terminal. -* `always`: Always display colors. -* `never`: Never display colors. - -Can be overridden with the `--color` command-line option. - -##### `term.progress.when` -* Type: string -* Default: "auto" -* Environment: `CARGO_TERM_PROGRESS_WHEN` - -Controls whether or not progress bar is shown in the terminal. Possible values: - -* `auto` (default): Intelligently guess whether to show progress bar. -* `always`: Always show progress bar. -* `never`: Never show progress bar. 
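-
-For example, a `.cargo/config.toml` might combine the terminal settings above
-as follows (the values shown are just one possible choice):
-
-```toml
-[term]
-quiet = false
-verbose = false
-color = "auto"
-
-[term.progress]
-when = "auto"
-```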
- -##### `term.progress.width` -* Type: integer -* Default: none -* Environment: `CARGO_TERM_PROGRESS_WIDTH` - -Sets the width for progress bar. - -[`cargo bench`]: ../commands/cargo-bench.md -[`cargo login`]: ../commands/cargo-login.md -[`cargo doc`]: ../commands/cargo-doc.md -[`cargo new`]: ../commands/cargo-new.md -[`cargo publish`]: ../commands/cargo-publish.md -[`cargo run`]: ../commands/cargo-run.md -[`cargo rustc`]: ../commands/cargo-rustc.md -[`cargo test`]: ../commands/cargo-test.md -[`cargo rustdoc`]: ../commands/cargo-rustdoc.md -[`cargo install`]: ../commands/cargo-install.md -[env]: environment-variables.md -[`cfg()` expression]: ../../reference/conditional-compilation.html -[build scripts]: build-scripts.md -[`-C linker`]: ../../rustc/codegen-options/index.md#linker -[override a build script]: build-scripts.md#overriding-build-scripts -[toml]: https://toml.io/ -[incremental compilation]: profiles.md#incremental -[profile]: profiles.md -[program path with args]: #executable-paths-with-arguments -[libcurl format]: https://ec.haxx.se/usingcurl-proxies.html -[source replacement]: source-replacement.md -[revision]: https://git-scm.com/docs/gitrevisions -[registries]: registries.md -[crates.io]: https://crates.io/ diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/environment-variables.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/environment-variables.md deleted file mode 100644 index aa9a0fdc1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/environment-variables.md +++ /dev/null @@ -1,393 +0,0 @@ -## Environment Variables - -Cargo sets and reads a number of environment variables which your code can detect -or override. Here is a list of the variables Cargo sets, organized by when it interacts -with them: - -### Environment variables Cargo reads - -You can override these environment variables to change Cargo's behavior on your -system: - -* `CARGO_HOME` โ€” Cargo maintains a local cache of the registry index and of - git checkouts of crates. By default these are stored under `$HOME/.cargo` - (`%USERPROFILE%\.cargo` on Windows), but this variable overrides the - location of this directory. Once a crate is cached it is not removed by the - clean command. - For more details refer to the [guide](../guide/cargo-home.md). -* `CARGO_TARGET_DIR` โ€” Location of where to place all generated artifacts, - relative to the current working directory. See [`build.target-dir`] to set - via config. -* `RUSTC` โ€” Instead of running `rustc`, Cargo will execute this specified - compiler instead. See [`build.rustc`] to set via config. -* `RUSTC_WRAPPER` โ€” Instead of simply running `rustc`, Cargo will execute this - specified wrapper instead, passing as its command-line arguments the rustc - invocation, with the first argument being `rustc`. Useful to set up a build - cache tool such as `sccache`. See [`build.rustc-wrapper`] to set via config. -* `RUSTC_WORKSPACE_WRAPPER` โ€” Instead of simply running `rustc`, Cargo will - execute this specified wrapper instead for workspace members only, passing - as its command-line arguments the rustc invocation, with the first argument - being `rustc`. It affects the filename hash so that artifacts produced by - the wrapper are cached separately. See [`build.rustc-workspace-wrapper`] - to set via config. -* `RUSTDOC` โ€” Instead of running `rustdoc`, Cargo will execute this specified - `rustdoc` instance instead. See [`build.rustdoc`] to set via config. 
-* `RUSTDOCFLAGS` โ€” A space-separated list of custom flags to pass to all `rustdoc` - invocations that Cargo performs. In contrast with [`cargo rustdoc`], this is - useful for passing a flag to *all* `rustdoc` instances. See - [`build.rustdocflags`] for some more ways to set flags. This string is - split by whitespace; for a more robust encoding of multiple arguments, - set `CARGO_ENCODED_RUSTDOCFLAGS` instead with arguments separated by - `0x1f` (ASCII Unit Separator). -* `RUSTFLAGS` โ€” A space-separated list of custom flags to pass to all compiler - invocations that Cargo performs. In contrast with [`cargo rustc`], this is - useful for passing a flag to *all* compiler instances. See - [`build.rustflags`] for some more ways to set flags. This string is - split by whitespace; for a more robust encoding of multiple arguments, - set `CARGO_ENCODED_RUSTFLAGS` instead with arguments separated by - `0x1f` (ASCII Unit Separator). -* `CARGO_INCREMENTAL` โ€” If this is set to 1 then Cargo will force [incremental - compilation] to be enabled for the current compilation, and when set to 0 it - will force disabling it. If this env var isn't present then cargo's defaults - will otherwise be used. See also [`build.incremental`] config value. -* `CARGO_CACHE_RUSTC_INFO` โ€” If this is set to 0 then Cargo will not try to cache - compiler version information. -* `HTTPS_PROXY` or `https_proxy` or `http_proxy` โ€” The HTTP proxy to use, see - [`http.proxy`] for more detail. -* `HTTP_TIMEOUT` โ€” The HTTP timeout in seconds, see [`http.timeout`] for more - detail. -* `TERM` โ€” If this is set to `dumb`, it disables the progress bar. -* `BROWSER` โ€” The web browser to execute to open documentation with [`cargo - doc`]'s' `--open` flag, see [`doc.browser`] for more details. -* `RUSTFMT` โ€” Instead of running `rustfmt`, - [`cargo fmt`](https://github.com/rust-lang/rustfmt) will execute this specified - `rustfmt` instance instead. - -#### Configuration environment variables - -Cargo reads environment variables for configuration values. See the -[configuration chapter][config-env] for more details. In summary, the -supported environment variables are: - -* `CARGO_ALIAS_` โ€” Command aliases, see [`alias`]. -* `CARGO_BUILD_JOBS` โ€” Number of parallel jobs, see [`build.jobs`]. -* `CARGO_BUILD_RUSTC` โ€” The `rustc` executable, see [`build.rustc`]. -* `CARGO_BUILD_RUSTC_WRAPPER` โ€” The `rustc` wrapper, see [`build.rustc-wrapper`]. -* `CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER` โ€” The `rustc` wrapper for workspace members only, see [`build.rustc-workspace-wrapper`]. -* `CARGO_BUILD_RUSTDOC` โ€” The `rustdoc` executable, see [`build.rustdoc`]. -* `CARGO_BUILD_TARGET` โ€” The default target platform, see [`build.target`]. -* `CARGO_BUILD_TARGET_DIR` โ€” The default output directory, see [`build.target-dir`]. -* `CARGO_BUILD_RUSTFLAGS` โ€” Extra `rustc` flags, see [`build.rustflags`]. -* `CARGO_BUILD_RUSTDOCFLAGS` โ€” Extra `rustdoc` flags, see [`build.rustdocflags`]. -* `CARGO_BUILD_INCREMENTAL` โ€” Incremental compilation, see [`build.incremental`]. -* `CARGO_BUILD_DEP_INFO_BASEDIR` โ€” Dep-info relative directory, see [`build.dep-info-basedir`]. -* `CARGO_BUILD_PIPELINING` โ€” Whether or not to use `rustc` pipelining, see [`build.pipelining`]. -* `CARGO_CARGO_NEW_VCS` โ€” The default source control system with [`cargo new`], see [`cargo-new.vcs`]. -* `CARGO_FUTURE_INCOMPAT_REPORT_FREQUENCY` - How often we should generate a future incompat report notifcation, see [`future-incompat-report.frequency`]. 
-* `CARGO_HTTP_DEBUG` โ€” Enables HTTP debugging, see [`http.debug`]. -* `CARGO_HTTP_PROXY` โ€” Enables HTTP proxy, see [`http.proxy`]. -* `CARGO_HTTP_TIMEOUT` โ€” The HTTP timeout, see [`http.timeout`]. -* `CARGO_HTTP_CAINFO` โ€” The TLS certificate Certificate Authority file, see [`http.cainfo`]. -* `CARGO_HTTP_CHECK_REVOKE` โ€” Disables TLS certificate revocation checks, see [`http.check-revoke`]. -* `CARGO_HTTP_SSL_VERSION` โ€” The TLS version to use, see [`http.ssl-version`]. -* `CARGO_HTTP_LOW_SPEED_LIMIT` โ€” The HTTP low-speed limit, see [`http.low-speed-limit`]. -* `CARGO_HTTP_MULTIPLEXING` โ€” Whether HTTP/2 multiplexing is used, see [`http.multiplexing`]. -* `CARGO_HTTP_USER_AGENT` โ€” The HTTP user-agent header, see [`http.user-agent`]. -* `CARGO_INSTALL_ROOT` โ€” The default directory for [`cargo install`], see [`install.root`]. -* `CARGO_NET_RETRY` โ€” Number of times to retry network errors, see [`net.retry`]. -* `CARGO_NET_GIT_FETCH_WITH_CLI` โ€” Enables the use of the `git` executable to fetch, see [`net.git-fetch-with-cli`]. -* `CARGO_NET_OFFLINE` โ€” Offline mode, see [`net.offline`]. -* `CARGO_PROFILE__BUILD_OVERRIDE_` โ€” Override build script profile, see [`profile..build-override`]. -* `CARGO_PROFILE__CODEGEN_UNITS` โ€” Set code generation units, see [`profile..codegen-units`]. -* `CARGO_PROFILE__DEBUG` โ€” What kind of debug info to include, see [`profile..debug`]. -* `CARGO_PROFILE__DEBUG_ASSERTIONS` โ€” Enable/disable debug assertions, see [`profile..debug-assertions`]. -* `CARGO_PROFILE__INCREMENTAL` โ€” Enable/disable incremental compilation, see [`profile..incremental`]. -* `CARGO_PROFILE__LTO` โ€” Link-time optimization, see [`profile..lto`]. -* `CARGO_PROFILE__OVERFLOW_CHECKS` โ€” Enable/disable overflow checks, see [`profile..overflow-checks`]. -* `CARGO_PROFILE__OPT_LEVEL` โ€” Set the optimization level, see [`profile..opt-level`]. -* `CARGO_PROFILE__PANIC` โ€” The panic strategy to use, see [`profile..panic`]. -* `CARGO_PROFILE__RPATH` โ€” The rpath linking option, see [`profile..rpath`]. -* `CARGO_PROFILE__SPLIT_DEBUGINFO` โ€” Controls debug file output behavior, see [`profile..split-debuginfo`]. -* `CARGO_REGISTRIES__INDEX` โ€” URL of a registry index, see [`registries..index`]. -* `CARGO_REGISTRIES__TOKEN` โ€” Authentication token of a registry, see [`registries..token`]. -* `CARGO_REGISTRY_DEFAULT` โ€” Default registry for the `--registry` flag, see [`registry.default`]. -* `CARGO_REGISTRY_TOKEN` โ€” Authentication token for [crates.io], see [`registry.token`]. -* `CARGO_TARGET__LINKER` โ€” The linker to use, see [`target..linker`]. The triple must be [converted to uppercase and underscores](config.md#environment-variables). -* `CARGO_TARGET__RUNNER` โ€” The executable runner, see [`target..runner`]. -* `CARGO_TARGET__RUSTFLAGS` โ€” Extra `rustc` flags for a target, see [`target..rustflags`]. -* `CARGO_TERM_QUIET` โ€” Quiet mode, see [`term.quiet`]. -* `CARGO_TERM_VERBOSE` โ€” The default terminal verbosity, see [`term.verbose`]. -* `CARGO_TERM_COLOR` โ€” The default color mode, see [`term.color`]. -* `CARGO_TERM_PROGRESS_WHEN` โ€” The default progress bar showing mode, see [`term.progress.when`]. -* `CARGO_TERM_PROGRESS_WIDTH` โ€” The default progress bar width, see [`term.progress.width`]. 
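-
-As a quick illustration of the mapping, each variable above corresponds to a
-key in `.cargo/config.toml`. For example, exporting `CARGO_BUILD_JOBS=2` and
-`CARGO_TERM_COLOR=always` (arbitrary values) has the same effect as putting
-the following in a configuration file:
-
-```toml
-[build]
-jobs = 2
-
-[term]
-color = "always"
-```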
- -[`cargo doc`]: ../commands/cargo-doc.md -[`cargo install`]: ../commands/cargo-install.md -[`cargo new`]: ../commands/cargo-new.md -[`cargo rustc`]: ../commands/cargo-rustc.md -[`cargo rustdoc`]: ../commands/cargo-rustdoc.md -[config-env]: config.md#environment-variables -[crates.io]: https://crates.io/ -[incremental compilation]: profiles.md#incremental -[`alias`]: config.md#alias -[`build.jobs`]: config.md#buildjobs -[`build.rustc`]: config.md#buildrustc -[`build.rustc-wrapper`]: config.md#buildrustc-wrapper -[`build.rustc-workspace-wrapper`]: config.md#buildrustc-workspace-wrapper -[`build.rustdoc`]: config.md#buildrustdoc -[`build.target`]: config.md#buildtarget -[`build.target-dir`]: config.md#buildtarget-dir -[`build.rustflags`]: config.md#buildrustflags -[`build.rustdocflags`]: config.md#buildrustdocflags -[`build.incremental`]: config.md#buildincremental -[`build.dep-info-basedir`]: config.md#builddep-info-basedir -[`build.pipelining`]: config.md#buildpipelining -[`doc.browser`]: config.md#docbrowser -[`cargo-new.name`]: config.md#cargo-newname -[`cargo-new.email`]: config.md#cargo-newemail -[`cargo-new.vcs`]: config.md#cargo-newvcs -[`future-incompat-report.frequency`]: config.md#future-incompat-reportfrequency -[`http.debug`]: config.md#httpdebug -[`http.proxy`]: config.md#httpproxy -[`http.timeout`]: config.md#httptimeout -[`http.cainfo`]: config.md#httpcainfo -[`http.check-revoke`]: config.md#httpcheck-revoke -[`http.ssl-version`]: config.md#httpssl-version -[`http.low-speed-limit`]: config.md#httplow-speed-limit -[`http.multiplexing`]: config.md#httpmultiplexing -[`http.user-agent`]: config.md#httpuser-agent -[`install.root`]: config.md#installroot -[`net.retry`]: config.md#netretry -[`net.git-fetch-with-cli`]: config.md#netgit-fetch-with-cli -[`net.offline`]: config.md#netoffline -[`profile..build-override`]: config.md#profilenamebuild-override -[`profile..codegen-units`]: config.md#profilenamecodegen-units -[`profile..debug`]: config.md#profilenamedebug -[`profile..debug-assertions`]: config.md#profilenamedebug-assertions -[`profile..incremental`]: config.md#profilenameincremental -[`profile..lto`]: config.md#profilenamelto -[`profile..overflow-checks`]: config.md#profilenameoverflow-checks -[`profile..opt-level`]: config.md#profilenameopt-level -[`profile..panic`]: config.md#profilenamepanic -[`profile..rpath`]: config.md#profilenamerpath -[`profile..split-debuginfo`]: config.md#profilenamesplit-debuginfo -[`registries..index`]: config.md#registriesnameindex -[`registries..token`]: config.md#registriesnametoken -[`registry.default`]: config.md#registrydefault -[`registry.token`]: config.md#registrytoken -[`target..linker`]: config.md#targettriplelinker -[`target..runner`]: config.md#targettriplerunner -[`target..rustflags`]: config.md#targettriplerustflags -[`term.quiet`]: config.md#termquiet -[`term.verbose`]: config.md#termverbose -[`term.color`]: config.md#termcolor -[`term.progress.when`]: config.md#termprogresswhen -[`term.progress.width`]: config.md#termprogresswidth - -### Environment variables Cargo sets for crates - -Cargo exposes these environment variables to your crate when it is compiled. -Note that this applies for running binaries with `cargo run` and `cargo test` -as well. To get the value of any of these variables in a Rust program, do -this: - -```rust,ignore -let version = env!("CARGO_PKG_VERSION"); -``` - -`version` will now contain the value of `CARGO_PKG_VERSION`. 
- -Note that if one of these values is not provided in the manifest, the -corresponding environment variable is set to the empty string, `""`. - -* `CARGO` โ€” Path to the `cargo` binary performing the build. -* `CARGO_MANIFEST_DIR` โ€” The directory containing the manifest of your package. -* `CARGO_PKG_VERSION` โ€” The full version of your package. -* `CARGO_PKG_VERSION_MAJOR` โ€” The major version of your package. -* `CARGO_PKG_VERSION_MINOR` โ€” The minor version of your package. -* `CARGO_PKG_VERSION_PATCH` โ€” The patch version of your package. -* `CARGO_PKG_VERSION_PRE` โ€” The pre-release version of your package. -* `CARGO_PKG_AUTHORS` โ€” Colon separated list of authors from the manifest of your package. -* `CARGO_PKG_NAME` โ€” The name of your package. -* `CARGO_PKG_DESCRIPTION` โ€” The description from the manifest of your package. -* `CARGO_PKG_HOMEPAGE` โ€” The home page from the manifest of your package. -* `CARGO_PKG_REPOSITORY` โ€” The repository from the manifest of your package. -* `CARGO_PKG_LICENSE` โ€” The license from the manifest of your package. -* `CARGO_PKG_LICENSE_FILE` โ€” The license file from the manifest of your package. -* `CARGO_CRATE_NAME` โ€” The name of the crate that is currently being compiled. -* `CARGO_BIN_NAME` โ€” The name of the binary that is currently being compiled (if it is a binary). This name does not include any file extension, such as `.exe`. -* `OUT_DIR` โ€” If the package has a build script, this is set to the folder where the build - script should place its output. See below for more information. - (Only set during compilation.) -* `CARGO_BIN_EXE_` โ€” The absolute path to a binary target's executable. - This is only set when building an [integration test] or benchmark. This may - be used with the [`env` macro] to find the executable to run for testing - purposes. The `` is the name of the binary target, exactly as-is. For - example, `CARGO_BIN_EXE_my-program` for a binary named `my-program`. - Binaries are automatically built when the test is built, unless the binary - has required features that are not enabled. -* `CARGO_PRIMARY_PACKAGE` โ€” This environment variable will be set if the - package being built is primary. Primary packages are the ones the user - selected on the command-line, either with `-p` flags or the defaults based - on the current directory and the default workspace members. This environment - variable will not be set when building dependencies. This is only set when - compiling the package (not when running binaries or tests). -* `CARGO_TARGET_TMPDIR` โ€” Only set when building [integration test] or benchmark code. - This is a path to a directory inside the target directory - where integration tests or benchmarks are free to put any data needed by - the tests/benches. Cargo initially creates this directory but doesn't - manage its content in any way, this is the responsibility of the test code. - -[integration test]: cargo-targets.md#integration-tests -[`env` macro]: ../../std/macro.env.html - -#### Dynamic library paths - -Cargo also sets the dynamic library path when compiling and running binaries -with commands like `cargo run` and `cargo test`. This helps with locating -shared libraries that are part of the build process. The variable name depends -on the platform: - -* Windows: `PATH` -* macOS: `DYLD_FALLBACK_LIBRARY_PATH` -* Unix: `LD_LIBRARY_PATH` - -The value is extended from the existing value when Cargo starts. 
macOS has -special consideration where if `DYLD_FALLBACK_LIBRARY_PATH` is not already -set, it will add the default `$HOME/lib:/usr/local/lib:/usr/lib`. - -Cargo includes the following paths: - -* Search paths included from any build script with the [`rustc-link-search` - instruction](build-scripts.md#rustc-link-search). Paths outside of the - `target` directory are removed. It is the responsibility of the user running - Cargo to properly set the environment if additional libraries on the system - are needed in the search path. -* The base output directory, such as `target/debug`, and the "deps" directory. - This is mostly for legacy support of `rustc` compiler plugins. -* The rustc sysroot library path. This generally is not important to most - users. - -### Environment variables Cargo sets for build scripts - -Cargo sets several environment variables when build scripts are run. Because these variables -are not yet set when the build script is compiled, the above example using `env!` won't work -and instead you'll need to retrieve the values when the build script is run: - -```rust,ignore -use std::env; -let out_dir = env::var("OUT_DIR").unwrap(); -``` - -`out_dir` will now contain the value of `OUT_DIR`. - -* `CARGO` โ€” Path to the `cargo` binary performing the build. -* `CARGO_MANIFEST_DIR` โ€” The directory containing the manifest for the package - being built (the package containing the build - script). Also note that this is the value of the - current working directory of the build script when it - starts. -* `CARGO_MANIFEST_LINKS` โ€” the manifest `links` value. -* `CARGO_MAKEFLAGS` โ€” Contains parameters needed for Cargo's [jobserver] - implementation to parallelize subprocesses. - Rustc or cargo invocations from build.rs can already - read `CARGO_MAKEFLAGS`, but GNU Make requires the - flags to be specified either directly as arguments, - or through the `MAKEFLAGS` environment variable. - Currently Cargo doesn't set the `MAKEFLAGS` variable, - but it's free for build scripts invoking GNU Make - to set it to the contents of `CARGO_MAKEFLAGS`. -* `CARGO_FEATURE_` โ€” For each activated feature of the package being - built, this environment variable will be present - where `` is the name of the feature uppercased - and having `-` translated to `_`. -* `CARGO_CFG_` โ€” For each [configuration option][configuration] of the - package being built, this environment variable will contain the value of the - configuration, where `` is the name of the configuration uppercased and - having `-` translated to `_`. Boolean configurations are present if they are - set, and not present otherwise. Configurations with multiple values are - joined to a single variable with the values delimited by `,`. This includes - values built-in to the compiler (which can be seen with `rustc --print=cfg`) - and values set by build scripts and extra flags passed to `rustc` (such as - those defined in `RUSTFLAGS`). Some examples of what these variables are: - * `CARGO_CFG_UNIX` โ€” Set on [unix-like platforms]. - * `CARGO_CFG_WINDOWS` โ€” Set on [windows-like platforms]. - * `CARGO_CFG_TARGET_FAMILY=unix` โ€” The [target family], either `unix` or `windows`. - * `CARGO_CFG_TARGET_OS=macos` โ€” The [target operating system]. - * `CARGO_CFG_TARGET_ARCH=x86_64` โ€” The CPU [target architecture]. - * `CARGO_CFG_TARGET_VENDOR=apple` โ€” The [target vendor]. - * `CARGO_CFG_TARGET_ENV=gnu` โ€” The [target environment] ABI. - * `CARGO_CFG_TARGET_POINTER_WIDTH=64` โ€” The CPU [pointer width]. 
- * `CARGO_CFG_TARGET_ENDIAN=little` โ€” The CPU [target endianness]. - * `CARGO_CFG_TARGET_FEATURE=mmx,sse` โ€” List of CPU [target features] enabled. -* `OUT_DIR` โ€” the folder in which all output should be placed. This folder is - inside the build directory for the package being built, and it is - unique for the package in question. -* `TARGET` โ€” the target triple that is being compiled for. Native code should be - compiled for this triple. See the [Target Triple] description - for more information. -* `HOST` โ€” the host triple of the Rust compiler. -* `NUM_JOBS` โ€” the parallelism specified as the top-level parallelism. This can - be useful to pass a `-j` parameter to a system like `make`. Note - that care should be taken when interpreting this environment - variable. For historical purposes this is still provided but - recent versions of Cargo, for example, do not need to run `make - -j`, and instead can set the `MAKEFLAGS` env var to the content - of `CARGO_MAKEFLAGS` to activate the use of Cargo's GNU Make - compatible [jobserver] for sub-make invocations. -* `OPT_LEVEL`, `DEBUG` โ€” values of the corresponding variables for the - profile currently being built. -* `PROFILE` โ€” `release` for release builds, `debug` for other builds. This is - determined based on if the [profile] inherits from the [`dev`] or - [`release`] profile. Using this environment variable is not recommended. - Using other environment variables like `OPT_LEVEL` provide a more correct - view of the actual settings being used. -* `DEP__` โ€” For more information about this set of environment - variables, see build script documentation about [`links`][links]. -* `RUSTC`, `RUSTDOC` โ€” the compiler and documentation generator that Cargo has - resolved to use, passed to the build script so it might - use it as well. -* `RUSTC_WRAPPER` โ€” the `rustc` wrapper, if any, that Cargo is using. - See [`build.rustc-wrapper`]. -* `RUSTC_WORKSPACE_WRAPPER` โ€” the `rustc` wrapper, if any, that Cargo is - using for workspace members. See - [`build.rustc-workspace-wrapper`]. -* `RUSTC_LINKER` โ€” The path to the linker binary that Cargo has resolved to use - for the current target, if specified. The linker can be - changed by editing `.cargo/config.toml`; see the documentation - about [cargo configuration][cargo-config] for more - information. -* `CARGO_ENCODED_RUSTFLAGS` โ€” extra flags that Cargo invokes `rustc` with, - separated by a `0x1f` character (ASCII Unit Separator). See - [`build.rustflags`]. Note that since Rust 1.55, `RUSTFLAGS` is removed from - the environment; scripts should use `CARGO_ENCODED_RUSTFLAGS` instead. -* `CARGO_PKG_` - The package information variables, with the same names and values as are [provided during crate building][variables set for crates]. 
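-
-A minimal `build.rs` sketch tying several of these variables together; the
-`webp` feature and the generated `build_info.rs` file are hypothetical:
-
-```rust,ignore
-use std::env;
-use std::fs;
-use std::path::PathBuf;
-
-fn main() {
-    // OUT_DIR is always set for build scripts; generated files belong there.
-    let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
-
-    // CARGO_CFG_TARGET_OS mirrors the `target_os` cfg value of the target.
-    let target_os = env::var("CARGO_CFG_TARGET_OS").unwrap();
-
-    // CARGO_FEATURE_WEBP is present only if the (hypothetical) `webp`
-    // feature of this package is enabled.
-    let webp_enabled = env::var_os("CARGO_FEATURE_WEBP").is_some();
-
-    fs::write(
-        out_dir.join("build_info.rs"),
-        format!(
-            "pub const TARGET_OS: &str = {:?};\npub const WEBP: bool = {};\n",
-            target_os, webp_enabled
-        ),
-    )
-    .unwrap();
-}
-```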
- -[unix-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows -[windows-like platforms]: ../../reference/conditional-compilation.html#unix-and-windows -[target family]: ../../reference/conditional-compilation.html#target_family -[target operating system]: ../../reference/conditional-compilation.html#target_os -[target architecture]: ../../reference/conditional-compilation.html#target_arch -[target vendor]: ../../reference/conditional-compilation.html#target_vendor -[target environment]: ../../reference/conditional-compilation.html#target_env -[pointer width]: ../../reference/conditional-compilation.html#target_pointer_width -[target endianness]: ../../reference/conditional-compilation.html#target_endian -[target features]: ../../reference/conditional-compilation.html#target_feature -[links]: build-scripts.md#the-links-manifest-key -[configuration]: ../../reference/conditional-compilation.html -[jobserver]: https://www.gnu.org/software/make/manual/html_node/Job-Slots.html -[cargo-config]: config.md -[Target Triple]: ../appendix/glossary.md#target -[variables set for crates]: #environment-variables-cargo-sets-for-crates -[profile]: profiles.md -[`dev`]: profiles.md#dev -[`release`]: profiles.md#release - -### Environment variables Cargo sets for 3rd party subcommands - -Cargo exposes this environment variable to 3rd party subcommands -(ie. programs named `cargo-foobar` placed in `$PATH`): - -* `CARGO` โ€” Path to the `cargo` binary performing the build. - -For extended information about your environment you may run `cargo metadata`. diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/external-tools.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/external-tools.md deleted file mode 100644 index 58f4787d1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/external-tools.md +++ /dev/null @@ -1,284 +0,0 @@ -## External tools - -One of the goals of Cargo is simple integration with third-party tools, like -IDEs and other build systems. To make integration easier, Cargo has several -facilities: - -* a [`cargo metadata`] command, which outputs package structure and dependencies - information in JSON, - -* a `--message-format` flag, which outputs information about a particular build, - and - -* support for custom subcommands. - - -### Information about package structure - -You can use [`cargo metadata`] command to get information about package -structure and dependencies. See the [`cargo metadata`] documentation -for details on the format of the output. - -The format is stable and versioned. When calling `cargo metadata`, you should -pass `--format-version` flag explicitly to avoid forward incompatibility -hazard. - -If you are using Rust, the [cargo_metadata] crate can be used to parse the -output. - -[cargo_metadata]: https://crates.io/crates/cargo_metadata -[`cargo metadata`]: ../commands/cargo-metadata.md - -### JSON messages - -When passing `--message-format=json`, Cargo will output the following -information during the build: - -* compiler errors and warnings, - -* produced artifacts, - -* results of the build scripts (for example, native dependencies). - -The output goes to stdout in the JSON object per line format. The `reason` field -distinguishes different kinds of messages. - -The `--message-format` option can also take additional formatting values which -alter the way the JSON messages are computed and rendered. 
See the description -of the `--message-format` option in the [build command documentation] for more -details. - -If you are using Rust, the [cargo_metadata] crate can be used to parse these -messages. - -[build command documentation]: ../commands/cargo-build.md -[cargo_metadata]: https://crates.io/crates/cargo_metadata - -#### Compiler messages - -The "compiler-message" message includes output from the compiler, such as -warnings and errors. See the [rustc JSON chapter](../../rustc/json.md) for -details on `rustc`'s message format, which is embedded in the following -structure: - -```javascript -{ - /* The "reason" indicates the kind of message. */ - "reason": "compiler-message", - /* The Package ID, a unique identifier for referring to the package. */ - "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)", - /* Absolute path to the package manifest. */ - "manifest_path": "/path/to/my-package/Cargo.toml", - /* The Cargo target (lib, bin, example, etc.) that generated the message. */ - "target": { - /* Array of target kinds. - - lib targets list the `crate-type` values from the - manifest such as "lib", "rlib", "dylib", - "proc-macro", etc. (default ["lib"]) - - binary is ["bin"] - - example is ["example"] - - integration test is ["test"] - - benchmark is ["bench"] - - build script is ["custom-build"] - */ - "kind": [ - "lib" - ], - /* Array of crate types. - - lib and example libraries list the `crate-type` values - from the manifest such as "lib", "rlib", "dylib", - "proc-macro", etc. (default ["lib"]) - - all other target kinds are ["bin"] - */ - "crate_types": [ - "lib" - ], - /* The name of the target. */ - "name": "my-package", - /* Absolute path to the root source file of the target. */ - "src_path": "/path/to/my-package/src/lib.rs", - /* The Rust edition of the target. - Defaults to the package edition. - */ - "edition": "2018", - /* Array of required features. - This property is not included if no required features are set. - */ - "required-features": ["feat1"], - /* Whether or not this target has doc tests enabled, and - the target is compatible with doc testing. - */ - "doctest": true - }, - /* The message emitted by the compiler. - - See https://doc.rust-lang.org/rustc/json.html for details. - */ - "message": { - /* ... */ - } -} -``` - -#### Artifact messages - -For every compilation step, a "compiler-artifact" message is emitted with the -following structure: - -```javascript -{ - /* The "reason" indicates the kind of message. */ - "reason": "compiler-artifact", - /* The Package ID, a unique identifier for referring to the package. */ - "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)", - /* Absolute path to the package manifest. */ - "manifest_path": "/path/to/my-package/Cargo.toml", - /* The Cargo target (lib, bin, example, etc.) that generated the artifacts. - See the definition above for `compiler-message` for details. - */ - "target": { - "kind": [ - "lib" - ], - "crate_types": [ - "lib" - ], - "name": "my-package", - "src_path": "/path/to/my-package/src/lib.rs", - "edition": "2018", - "doctest": true, - "test": true - }, - /* The profile indicates which compiler settings were used. */ - "profile": { - /* The optimization level. */ - "opt_level": "0", - /* The debug level, an integer of 0, 1, or 2. If `null`, it implies - rustc's default of 0. - */ - "debuginfo": 2, - /* Whether or not debug assertions are enabled. */ - "debug_assertions": true, - /* Whether or not overflow checks are enabled. 
*/ - "overflow_checks": true, - /* Whether or not the `--test` flag is used. */ - "test": false - }, - /* Array of features enabled. */ - "features": ["feat1", "feat2"], - /* Array of files generated by this step. */ - "filenames": [ - "/path/to/my-package/target/debug/libmy_package.rlib", - "/path/to/my-package/target/debug/deps/libmy_package-be9f3faac0a26ef0.rmeta" - ], - /* A string of the path to the executable that was created, or null if - this step did not generate an executable. - */ - "executable": null, - /* Whether or not this step was actually executed. - When `true`, this means that the pre-existing artifacts were - up-to-date, and `rustc` was not executed. When `false`, this means that - `rustc` was run to generate the artifacts. - */ - "fresh": true -} - -``` - -#### Build script output - -The "build-script-executed" message includes the parsed output of a build -script. Note that this is emitted even if the build script is not run; it will -display the previously cached value. More details about build script output -may be found in [the chapter on build scripts](build-scripts.md). - -```javascript -{ - /* The "reason" indicates the kind of message. */ - "reason": "build-script-executed", - /* The Package ID, a unique identifier for referring to the package. */ - "package_id": "my-package 0.1.0 (path+file:///path/to/my-package)", - /* Array of libraries to link, as indicated by the `cargo:rustc-link-lib` - instruction. Note that this may include a "KIND=" prefix in the string - where KIND is the library kind. - */ - "linked_libs": ["foo", "static=bar"], - /* Array of paths to include in the library search path, as indicated by - the `cargo:rustc-link-search` instruction. Note that this may include a - "KIND=" prefix in the string where KIND is the library kind. - */ - "linked_paths": ["/some/path", "native=/another/path"], - /* Array of cfg values to enable, as indicated by the `cargo:rustc-cfg` - instruction. - */ - "cfgs": ["cfg1", "cfg2=\"string\""], - /* Array of [KEY, VALUE] arrays of environment variables to set, as - indicated by the `cargo:rustc-env` instruction. - */ - "env": [ - ["SOME_KEY", "some value"], - ["ANOTHER_KEY", "another value"] - ], - /* An absolute path which is used as a value of `OUT_DIR` environmental - variable when compiling current package. - */ - "out_dir": "/some/path/in/target/dir" -} -``` - -#### Build finished - -The "build-finished" message is emitted at the end of the build. - -```javascript -{ - /* The "reason" indicates the kind of message. */ - "reason": "build-finished", - /* Whether or not the build finished successfully. */ - "success": true, -} -```` - -This message can be helpful for tools to know when to stop reading JSON -messages. Commands such as `cargo test` or `cargo run` can produce additional -output after the build has finished. This message lets a tool know that Cargo -will not produce additional JSON messages, but there may be additional output -that may be generated afterwards (such as the output generated by the program -executed by `cargo run`). - -> Note: There is experimental nightly-only support for JSON output for tests, -> so additional test-specific JSON messages may begin arriving after the -> "build-finished" message if that is enabled. - -### Custom subcommands - -Cargo is designed to be extensible with new subcommands without having to modify -Cargo itself. This is achieved by translating a cargo invocation of the form -cargo `(?[^ ]+)` into an invocation of an external tool -`cargo-${command}`. 
The external tool must be present in one of the user's -`$PATH` directories. - -When Cargo invokes a custom subcommand, the first argument to the subcommand -will be the filename of the custom subcommand, as usual. The second argument -will be the subcommand name itself. For example, the second argument would be -`${command}` when invoking `cargo-${command}`. Any additional arguments on the -command line will be forwarded unchanged. - -Cargo can also display the help output of a custom subcommand with `cargo help -${command}`. Cargo assumes that the subcommand will print a help message if its -third argument is `--help`. So, `cargo help ${command}` would invoke -`cargo-${command} ${command} --help`. - -Custom subcommands may use the `CARGO` environment variable to call back to -Cargo. Alternatively, it can link to `cargo` crate as a library, but this -approach has drawbacks: - -* Cargo as a library is unstable: the API may change without deprecation -* versions of the linked Cargo library may be different from the Cargo binary - -Instead, it is encouraged to use the CLI interface to drive Cargo. The [`cargo -metadata`] command can be used to obtain information about the current project -(the [`cargo_metadata`] crate provides a Rust interface to this command). - -[`cargo metadata`]: ../commands/cargo-metadata.md -[`cargo_metadata`]: https://crates.io/crates/cargo_metadata diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/features-examples.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/features-examples.md deleted file mode 100644 index ac9636fcb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/features-examples.md +++ /dev/null @@ -1,187 +0,0 @@ -## Features Examples - -The following illustrates some real-world examples of features in action. - -### Minimizing build times and file sizes - -Some packages use features so that if the features are not enabled, it reduces -the size of the crate and reduces compile time. Some examples are: - -* [`syn`] is a popular crate for parsing Rust code. Since it is so popular, it - is helpful to reduce compile times since it affects so many projects. It has - a [clearly documented list][syn-features] of features which can be used to - minimize the amount of code it contains. -* [`regex`] has a [several features][regex-features] that are [well - documented][regex-docs]. Cutting out Unicode support can reduce the - resulting file size as it can remove some large tables. -* [`winapi`] has [a large number][winapi-features] of features that - limit which Windows API bindings it supports. -* [`web-sys`] is another example similar to `winapi` that provides a [huge - surface area][web-sys-features] of API bindings that are limited by using - features. 
- -[`winapi`]: https://crates.io/crates/winapi -[winapi-features]: https://github.com/retep998/winapi-rs/blob/0.3.9/Cargo.toml#L25-L431 -[`regex`]: https://crates.io/crates/regex -[`syn`]: https://crates.io/crates/syn -[syn-features]: https://docs.rs/syn/1.0.54/syn/#optional-features -[regex-features]: https://github.com/rust-lang/regex/blob/1.4.2/Cargo.toml#L33-L101 -[regex-docs]: https://docs.rs/regex/1.4.2/regex/#crate-features -[`web-sys`]: https://crates.io/crates/web-sys -[web-sys-features]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/crates/web-sys/Cargo.toml#L32-L1395 - -### Extending behavior - -The [`serde_json`] package has a [`preserve_order` feature][serde_json-preserve_order] -which [changes the behavior][serde_json-code] of JSON maps to preserve the -order that keys are inserted. Notice that it enables an optional dependency -[`indexmap`] to implement the new behavior. - -When changing behavior like this, be careful to make sure the changes are -[SemVer compatible]. That is, enabling the feature should not break code that -usually builds with the feature off. - -[`serde_json`]: https://crates.io/crates/serde_json -[serde_json-preserve_order]: https://github.com/serde-rs/json/blob/v1.0.60/Cargo.toml#L53-L56 -[SemVer compatible]: features.md#semver-compatibility -[serde_json-code]: https://github.com/serde-rs/json/blob/v1.0.60/src/map.rs#L23-L26 -[`indexmap`]: https://crates.io/crates/indexmap - -### `no_std` support - -Some packages want to support both [`no_std`] and `std` environments. This is -useful for supporting embedded and resource-constrained platforms, but still -allowing extended capabilities for platforms that support the full standard -library. - -The [`wasm-bindgen`] package defines a [`std` feature][wasm-bindgen-std] that -is [enabled by default][wasm-bindgen-default]. At the top of the library, it -[unconditionally enables the `no_std` attribute][wasm-bindgen-no_std]. This -ensures that `std` and the [`std` prelude] are not automatically in scope. -Then, in various places in the code ([example1][wasm-bindgen-cfg1], -[example2][wasm-bindgen-cfg2]), it uses `#[cfg(feature = "std")]` attributes -to conditionally enable extra functionality that requires `std`. - -[`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute -[`wasm-bindgen`]: https://crates.io/crates/wasm-bindgen -[`std` prelude]: ../../std/prelude/index.html -[wasm-bindgen-std]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/Cargo.toml#L25 -[wasm-bindgen-default]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/Cargo.toml#L23 -[wasm-bindgen-no_std]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L8 -[wasm-bindgen-cfg1]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L270-L273 -[wasm-bindgen-cfg2]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L67-L75 - -### Re-exporting dependency features - -It can be convenient to re-export the features from a dependency. This allows -the user depending on the crate to control those features without needing to -specify those dependencies directly. For example, [`regex`] [re-exports the -features][regex-re-export] from the [`regex_syntax`][regex_syntax-features] -package. Users of `regex` don't need to know about the `regex_syntax` package, -but they can still access the features it contains. 
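-
-A minimal sketch of this pattern, using made-up crate and feature names:
-
-```toml
-# Hypothetical wrapper crate that forwards a dependency's feature.
-[dependencies]
-some-parser = { version = "1.0", default-features = false }
-
-[features]
-# Users enable `unicode` on this crate; it is passed through to `some-parser`.
-unicode = ["some-parser/unicode"]
-```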
- -[regex-re-export]: https://github.com/rust-lang/regex/blob/1.4.2/Cargo.toml#L65-L89 -[regex_syntax-features]: https://github.com/rust-lang/regex/blob/1.4.2/regex-syntax/Cargo.toml#L17-L32 - -### Vendoring of C libraries - -Some packages provide bindings to common C libraries (sometimes referred to as -["sys" crates][sys]). Sometimes these packages give you the choice to use the -C library installed on the system, or to build it from source. For example, -the [`openssl`] package has a [`vendored` feature][openssl-vendored] which -enables the corresponding `vendored` feature of [`openssl-sys`]. The -`openssl-sys` build script has some [conditional logic][openssl-sys-cfg] which -causes it to build from a local copy of the OpenSSL source code instead of -using the version from the system. - -The [`curl-sys`] package is another example where the [`static-curl` -feature][curl-sys-static] causes it to build libcurl from source. Notice that -it also has a [`force-system-lib-on-osx`][curl-sys-macos] feature which forces -it [to use the system libcurl][curl-sys-macos-code], overriding the -static-curl setting. - -[`openssl`]: https://crates.io/crates/openssl -[`openssl-sys`]: https://crates.io/crates/openssl-sys -[sys]: build-scripts.md#-sys-packages -[openssl-vendored]: https://github.com/sfackler/rust-openssl/blob/openssl-v0.10.31/openssl/Cargo.toml#L19 -[build script]: build-scripts.md -[openssl-sys-cfg]: https://github.com/sfackler/rust-openssl/blob/openssl-v0.10.31/openssl-sys/build/main.rs#L47-L54 -[`curl-sys`]: https://crates.io/crates/curl-sys -[curl-sys-static]: https://github.com/alexcrichton/curl-rust/blob/0.4.34/curl-sys/Cargo.toml#L49 -[curl-sys-macos]: https://github.com/alexcrichton/curl-rust/blob/0.4.34/curl-sys/Cargo.toml#L52 -[curl-sys-macos-code]: https://github.com/alexcrichton/curl-rust/blob/0.4.34/curl-sys/build.rs#L15-L20 - -### Feature precedence - -Some packages may have mutually-exclusive features. One option to handle this -is to prefer one feature over another. The [`log`] package is an example. It -has [several features][log-features] for choosing the maximum logging level at -compile-time described [here][log-docs]. It uses [`cfg-if`] to [choose a -precedence][log-cfg-if]. If multiple features are enabled, the higher "max" -levels will be preferred over the lower levels. - -[`log`]: https://crates.io/crates/log -[log-features]: https://github.com/rust-lang/log/blob/0.4.11/Cargo.toml#L29-L42 -[log-docs]: https://docs.rs/log/0.4.11/log/#compile-time-filters -[log-cfg-if]: https://github.com/rust-lang/log/blob/0.4.11/src/lib.rs#L1422-L1448 -[`cfg-if`]: https://crates.io/crates/cfg-if - -### Proc-macro companion package - -Some packages have a proc-macro that is intimately tied with it. However, not -all users will need to use the proc-macro. By making the proc-macro an -optional-dependency, this allows you to conveniently choose whether or not it -is included. This is helpful, because sometimes the proc-macro version must -stay in sync with the parent package, and you don't want to force the users to -have to specify both dependencies and keep them in sync. - -An example is [`serde`] which has a [`derive`][serde-derive] feature which -enables the [`serde_derive`] proc-macro. The `serde_derive` crate is very -tightly tied to `serde`, so it uses an [equals version -requirement][serde-equals] to ensure they stay in sync. 
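-
-A sketch of that layout with placeholder crate names and versions:
-
-```toml
-# Hypothetical parent crate with an optional companion proc-macro.
-[dependencies]
-# The `=` requirement keeps the proc-macro at exactly the same version.
-mylib-derive = { version = "=1.2.3", optional = true }
-
-[features]
-# Users write `features = ["derive"]` instead of depending on both crates.
-derive = ["mylib-derive"]
-```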
- -[`serde`]: https://crates.io/crates/serde -[`serde_derive`]: https://crates.io/crates/serde_derive -[serde-derive]: https://github.com/serde-rs/serde/blob/v1.0.118/serde/Cargo.toml#L34-L35 -[serde-equals]: https://github.com/serde-rs/serde/blob/v1.0.118/serde/Cargo.toml#L17 - -### Nightly-only features - -Some packages want to experiment with APIs or language features that are only -available on the Rust [nightly channel]. However, they may not want to require -their users to also use the nightly channel. An example is [`wasm-bindgen`] -which has a [`nightly` feature][wasm-bindgen-nightly] which enables an -[extended API][wasm-bindgen-unsize] that uses the [`Unsize`] marker trait that -is only available on the nightly channel at the time of this writing. - -Note that at the root of the crate it uses [`cfg_attr` to enable the nightly -feature][wasm-bindgen-cfg_attr]. Keep in mind that the [`feature` attribute] -is unrelated to Cargo features, and is used to opt-in to experimental language -features. - -The [`simd_support` feature][rand-simd_support] of the [`rand`] package is another example, -which relies on a dependency that only builds on the nightly channel. - -[`wasm-bindgen`]: https://crates.io/crates/wasm-bindgen -[nightly channel]: ../../book/appendix-07-nightly-rust.html -[wasm-bindgen-nightly]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/Cargo.toml#L27 -[wasm-bindgen-unsize]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/closure.rs#L257-L269 -[`Unsize`]: ../../std/marker/trait.Unsize.html -[wasm-bindgen-cfg_attr]: https://github.com/rustwasm/wasm-bindgen/blob/0.2.69/src/lib.rs#L11 -[`feature` attribute]: ../../unstable-book/index.html -[`rand`]: https://crates.io/crates/rand -[rand-simd_support]: https://github.com/rust-random/rand/blob/0.7.3/Cargo.toml#L40 - -### Experimental features - -Some packages have new functionality that they may want to experiment with, -without having to commit to the stability of those APIs. The features are -usually documented that they are experimental, and thus may change or break in -the future, even during a minor release. An example is the [`async-std`] -package, which has an [`unstable` feature][async-std-unstable], which [gates -new APIs][async-std-gate] that people can opt-in to using, but may not be -completely ready to be relied upon. - -[`async-std`]: https://crates.io/crates/async-std -[async-std-unstable]: https://github.com/async-rs/async-std/blob/v1.8.0/Cargo.toml#L38-L42 -[async-std-gate]: https://github.com/async-rs/async-std/blob/v1.8.0/src/macros.rs#L46 diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/features.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/features.md deleted file mode 100644 index 0defb30b7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/features.md +++ /dev/null @@ -1,479 +0,0 @@ -## Features - -Cargo "features" provide a mechanism to express [conditional compilation] and -[optional dependencies](#optional-dependencies). A package defines a set of -named features in the `[features]` table of `Cargo.toml`, and each feature can -either be enabled or disabled. Features for the package being built can be -enabled on the command-line with flags such as `--features`. Features for -dependencies can be enabled in the dependency declaration in `Cargo.toml`. - -See also the [Features Examples] chapter for some examples of how features can -be used. 
- -[conditional compilation]: ../../reference/conditional-compilation.md -[Features Examples]: features-examples.md - -### The `[features]` section - -Features are defined in the `[features]` table in `Cargo.toml`. Each feature -specifies an array of other features or optional dependencies that it enables. -The following examples illustrate how features could be used for a 2D image -processing library where support for different image formats can be optionally -included: - -```toml -[features] -# Defines a feature named `webp` that does not enable any other features. -webp = [] -``` - -With this feature defined, [`cfg` expressions] can be used to conditionally -include code to support the requested feature at compile time. For example, -inside `lib.rs` of the package could include this: - -```rust -// This conditionally includes a module which implements WEBP support. -#[cfg(feature = "webp")] -pub mod webp; -``` - -Cargo sets features in the package using the `rustc` [`--cfg` flag], and code -can test for their presence with the [`cfg` attribute] or the [`cfg` macro]. - -Features can list other features to enable. For example, the ICO image format -can contain BMP and PNG images, so when it is enabled, it should make sure -those other features are enabled, too: - -```toml -[features] -bmp = [] -png = [] -ico = ["bmp", "png"] -webp = [] -``` - -Feature names may include characters from the [Unicode XID standard] (which -includes most letters), and additionally allows starting with `_` or digits -`0` through `9`, and after the first character may also contain `-`, `+`, or -`.`. - -> **Note**: [crates.io] imposes additional constraints on feature name syntax -> that they must only be [ASCII alphanumeric] characters or `_`, `-`, or `+`. - -[crates.io]: https://crates.io/ -[Unicode XID standard]: https://unicode.org/reports/tr31/ -[ASCII alphanumeric]: ../../std/primitive.char.html#method.is_ascii_alphanumeric -[`--cfg` flag]: ../../rustc/command-line-arguments.md#option-cfg -[`cfg` expressions]: ../../reference/conditional-compilation.md -[`cfg` attribute]: ../../reference/conditional-compilation.md#the-cfg-attribute -[`cfg` macro]: ../../std/macro.cfg.html - -### The `default` feature - -By default, all features are disabled unless explicitly enabled. This can be -changed by specifying the `default` feature: - -```toml -[features] -default = ["ico", "webp"] -bmp = [] -png = [] -ico = ["bmp", "png"] -webp = [] -``` - -When the package is built, the `default` feature is enabled which in turn -enables the listed features. This behavior can be changed by: - -* The `--no-default-features` [command-line - flag](#command-line-feature-options) disables the default features of the - package. -* The `default-features = false` option can be specified in a [dependency - declaration](#dependency-features). - -> **Note**: Be careful about choosing the default feature set. The default -> features are a convenience that make it easier to use a package without -> forcing the user to carefully select which features to enable for common -> use, but there are some drawbacks. Dependencies automatically enable default -> features unless `default-features = false` is specified. This can make it -> difficult to ensure that the default features are not enabled, especially -> for a dependency that appears multiple times in the dependency graph. Every -> package must ensure that `default-features = false` is specified to avoid -> enabling them. 
-> -> Another issue is that it can be a [SemVer incompatible -> change](#semver-compatibility) to remove a feature from the default set, so -> you should be confident that you will keep those features. - -### Optional dependencies - -Dependencies can be marked "optional", which means they will not be compiled -by default. For example, let's say that our 2D image processing library uses -an external package to handle GIF images. This can be expressed like this: - -```toml -[dependencies] -gif = { version = "0.11.1", optional = true } -``` - -Optional dependencies implicitly define a feature of the same name as the -dependency. This means that the same `cfg(feature = "gif")` syntax can be used -in the code, and the dependency can be enabled just like a feature such as -`--features gif` (see [Command-line feature -options](#command-line-feature-options) below). - -> **Note**: A feature in the `[feature]` table cannot use the same name as a -> dependency. Experimental support for enabling this and other extensions is -> available on the nightly channel via [namespaced -> features](unstable.md#namespaced-features). - -Explicitly defined features can enable optional dependencies, too. Just -include the name of the optional dependency in the feature list. For example, -let's say in order to support the AVIF image format, our library needs two -other dependencies to be enabled: - -```toml -[dependencies] -ravif = { version = "0.6.3", optional = true } -rgb = { version = "0.8.25", optional = true } - -[features] -avif = ["ravif", "rgb"] -``` - -In this example, the `avif` feature will enable the two listed dependencies. - -> **Note**: Another way to optionally include a dependency is to use -> [platform-specific dependencies]. Instead of using features, these are -> conditional based on the target platform. - -[platform-specific dependencies]: specifying-dependencies.md#platform-specific-dependencies - -### Dependency features - -Features of dependencies can be enabled within the dependency declaration. The -`features` key indicates which features to enable: - -```toml -[dependencies] -# Enables the `derive` feature of serde. -serde = { version = "1.0.118", features = ["derive"] } -``` - -The [`default` features](#the-default-feature) can be disabled using -`default-features = false`: - -```toml -[dependencies] -flate2 = { version = "1.0.3", default-features = false, features = ["zlib"] } -``` - -> **Note**: This may not ensure the default features are disabled. If another -> dependency includes `flate2` without specifying `default-features = false`, -> then the default features will be enabled. See [feature -> unification](#feature-unification) below for more details. - -Features of dependencies can also be enabled in the `[features]` table. The -syntax is `"package-name/feature-name"`. For example: - -```toml -[dependencies] -jpeg-decoder = { version = "0.1.20", default-features = false } - -[features] -# Enables parallel processing support by enabling the "rayon" feature of jpeg-decoder. -parallel = ["jpeg-decoder/rayon"] -``` - -> **Note**: The `"package-name/feature-name"` syntax will also enable -> `package-name` if it is an optional dependency. Experimental support for -> disabling that behavior is available on the nightly channel via [weak -> dependency features](unstable.md#weak-dependency-features). - -### Command-line feature options - -The following command-line flags can be used to control which features are -enabled: - -* `--features` _FEATURES_: Enables the listed features. 
Multiple features may - be separated with commas or spaces. If using spaces, be sure to use quotes - around all the features if running Cargo from a shell (such as `--features - "foo bar"`). If building multiple packages in a [workspace], the - `package-name/feature-name` syntax can be used to specify features for - specific workspace members. - -* `--all-features`: Activates all features of all packages selected on the - command-line. - -* `--no-default-features`: Does not activate the [`default` - feature](#the-default-feature) of the selected packages. - -[workspace]: workspaces.md - -### Feature unification - -Features are unique to the package that defines them. Enabling a feature on a -package does not enable a feature of the same name on other packages. - -When a dependency is used by multiple packages, Cargo will use the union of -all features enabled on that dependency when building it. This helps ensure -that only a single copy of the dependency is used. See the [features section] -of the resolver documentation for more details. - -For example, let's look at the [`winapi`] package which uses a [large -number][winapi-features] of features. If your package depends on a package -`foo` which enables the "fileapi" and "handleapi" features of `winapi`, and -another dependency `bar` which enables the "std" and "winnt" features of -`winapi`, then `winapi` will be built with all four of those features enabled. - -![winapi features example](../images/winapi-features.svg) - -[`winapi`]: https://crates.io/crates/winapi -[winapi-features]: https://github.com/retep998/winapi-rs/blob/0.3.9/Cargo.toml#L25-L431 - -A consequence of this is that features should be *additive*. That is, enabling -a feature should not disable functionality, and it should usually be safe to -enable any combination of features. A feature should not introduce a -[SemVer-incompatible change](#semver-compatibility). - -For example, if you want to optionally support [`no_std`] environments, **do -not** use a `no_std` feature. Instead, use a `std` feature that *enables* -`std`. For example: - -```rust -#![no_std] - -#[cfg(feature = "std")] -extern crate std; - -#[cfg(feature = "std")] -pub fn function_that_requires_std() { - // ... -} -``` - -[`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute -[features section]: resolver.md#features - -#### Mutually exclusive features - -There are rare cases where features may be mutually incompatible with one -another. This should be avoided if at all possible, because it requires -coordinating all uses of the package in the dependency graph to cooperate to -avoid enabling them together. If it is not possible, consider adding a compile -error to detect this scenario. For example: - -```rust,ignore -#[cfg(all(feature = "foo", feature = "bar"))] -compile_error!("feature \"foo\" and feature \"bar\" cannot be enabled at the same time"); -``` - -Instead of using mutually exclusive features, consider some other options: - -* Split the functionality into separate packages. -* When there is a conflict, [choose one feature over - another][feature-precedence]. The [`cfg-if`] package can help with writing - more complex `cfg` expressions. -* Architect the code to allow the features to be enabled concurrently, and use - runtime options to control which is used. For example, use a config file, - command-line argument, or environment variable to choose which behavior to - enable. 
- -[`cfg-if`]: https://crates.io/crates/cfg-if -[feature-precedence]: features-examples.md#feature-precedence - -#### Inspecting resolved features - -In complex dependency graphs, it can sometimes be difficult to understand how -different features get enabled on various packages. The [`cargo tree`] command -offers several options to help inspect and visualize which features are -enabled. Some options to try: - -* `cargo tree -e features`: This will show features in the dependency graph. - Each feature will appear showing which package enabled it. -* `cargo tree -f "{p} {f}"`: This is a more compact view that shows a - comma-separated list of features enabled on each package. -* `cargo tree -e features -i foo`: This will invert the tree, showing how - features flow into the given package "foo". This can be useful because - viewing the entire graph can be quite large and overwhelming. Use this when - you are trying to figure out which features are enabled on a specific - package and why. See the example at the bottom of the [`cargo tree`] page on - how to read this. - -[`cargo tree`]: ../commands/cargo-tree.md - -### Feature resolver version 2 - -A different feature resolver can be specified with the `resolver` field in -`Cargo.toml`, like this: - -```toml -[package] -name = "my-package" -version = "1.0.0" -resolver = "2" -``` - -See the [resolver versions] section for more detail on specifying resolver -versions. - -The version `"2"` resolver avoids unifying features in a few situations where -that unification can be unwanted. The exact situations are described in the -[resolver chapter][resolver-v2], but in short, it avoids unifying in these -situations: - -* Features enabled on [platform-specific dependencies] for targets not - currently being built are ignored. -* [Build-dependencies] and proc-macros do not share features with normal - dependencies. -* [Dev-dependencies] do not activate features unless building a target that - needs them (like tests or examples). - -Avoiding the unification is necessary for some situations. For example, if a -build-dependency enables a `std` feature, and the same dependency is used as a -normal dependency for a `no_std` environment, enabling `std` would break the -build. - -However, one drawback is that this can increase build times because the -dependency is built multiple times (each with different features). When using -the version `"2"` resolver, it is recommended to check for dependencies that -are built multiple times to reduce overall build time. If it is not *required* -to build those duplicated packages with separate features, consider adding -features to the `features` list in the [dependency -declaration](#dependency-features) so that the duplicates end up with the same -features (and thus Cargo will build it only once). You can detect these -duplicate dependencies with the [`cargo tree --duplicates`][`cargo tree`] -command. It will show which packages are built multiple times; look for any -entries listed with the same version. See [Inspecting resolved -features](#inspecting-resolved-features) for more on fetching information on -the resolved features. For build dependencies, this is not necessary if you -are cross-compiling with the `--target` flag because build dependencies are -always built separately from normal dependencies in that scenario. 
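As a hedged sketch of that advice (crate and feature names are hypothetical), aligning the feature lists of a normal dependency and a build-dependency on the same crate lets Cargo compile it once when the host and target are the same:

```toml
# With resolver = "2" these two declarations do not share features, so giving
# them identical feature lists avoids building `some-dep` twice (unless you
# cross-compile with `--target`, where build-dependencies are always separate).
[dependencies]
some-dep = { version = "1.0", features = ["std", "extras"] }

[build-dependencies]
some-dep = { version = "1.0", features = ["std", "extras"] }
```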
-
-#### Resolver version 2 command-line flags
-
-The `resolver = "2"` setting also changes the behavior of the `--features` and
-`--no-default-features` [command-line options](#command-line-feature-options).
-
-With version `"1"`, you can only enable features for the package in the
-current working directory. For example, in a workspace with packages `foo` and
-`bar`, and you are in the directory for package `foo`, and ran the command
-`cargo build -p bar --features bar-feat`, this would fail because the
-`--features` flag only allowed enabling features on `foo`.
-
-With `resolver = "2"`, the features flags allow enabling features for any of
-the packages selected on the command-line with `-p` and `--workspace` flags.
-For example:
-
-```sh
-# This command is allowed with resolver = "2", regardless of which directory
-# you are in.
-cargo build -p foo -p bar --features foo-feat,bar-feat
-
-# This explicit equivalent works with any resolver version:
-cargo build -p foo -p bar --features foo/foo-feat,bar/bar-feat
-```
-
-Additionally, with `resolver = "1"`, the `--no-default-features` flag only
-disables the default feature for the package in the current directory. With
-version "2", it will disable the default features for all workspace members.
-
-[resolver versions]: resolver.md#resolver-versions
-[build-dependencies]: specifying-dependencies.md#build-dependencies
-[dev-dependencies]: specifying-dependencies.md#development-dependencies
-[resolver-v2]: resolver.md#feature-resolver-version-2
-
-### Build scripts
-
-[Build scripts] can detect which features are enabled on the package by
-inspecting the `CARGO_FEATURE_<name>` environment variable, where `<name>` is
-the feature name converted to uppercase and `-` converted to `_`.
-
-[build scripts]: build-scripts.md
-
-### Required features
-
-The [`required-features` field] can be used to disable specific [Cargo
-targets] if a feature is not enabled. See the linked documentation for more
-details.
-
-[`required-features` field]: cargo-targets.md#the-required-features-field
-[Cargo targets]: cargo-targets.md
-
-### SemVer compatibility
-
-Enabling a feature should not introduce a SemVer-incompatible change. For
-example, the feature shouldn't change an existing API in a way that could
-break existing uses. More details about what changes are compatible can be
-found in the [SemVer Compatibility chapter](semver.md).
-
-Care should be taken when adding and removing feature definitions and optional
-dependencies, as these can sometimes be backwards-incompatible changes. More
-details can be found in the [Cargo section](semver.md#cargo) of the SemVer
-Compatibility chapter. In short, follow these rules:
-
-* The following is usually safe to do in a minor release:
-  * Add a [new feature][cargo-feature-add] or [optional dependency][cargo-dep-add].
-  * [Change the features used on a dependency][cargo-change-dep-feature].
-* The following should usually **not** be done in a minor release:
-  * [Remove a feature][cargo-feature-remove] or [optional dependency][cargo-remove-opt-dep].
-  * [Moving existing public code behind a feature][item-remove].
-  * [Remove a feature from a feature list][cargo-feature-remove-another].
-
-See the links for caveats and examples.
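For instance, the usually-safe case from the first rule could look like this in a minor release (all names below are hypothetical): a new optional dependency is added together with a new feature that enables it, while the existing features are left untouched.

```toml
[dependencies]
# Newly added optional dependency; existing users are unaffected unless they
# opt in to the new feature.
fast-hash = { version = "2.1", optional = true }

[features]
# Pre-existing features keep their previous definitions...
default = ["std"]
std = []
# ...and only a brand-new feature is added.
fast-hashing = ["fast-hash"]
```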
- -[cargo-change-dep-feature]: semver.md#cargo-change-dep-feature -[cargo-dep-add]: semver.md#cargo-dep-add -[cargo-feature-add]: semver.md#cargo-feature-add -[item-remove]: semver.md#item-remove -[cargo-feature-remove]: semver.md#cargo-feature-remove -[cargo-remove-opt-dep]: semver.md#cargo-remove-opt-dep -[cargo-feature-remove-another]: semver.md#cargo-feature-remove-another - -### Feature documentation and discovery - -You are encouraged to document which features are available in your package. -This can be done by adding [doc comments] at the top of `lib.rs`. As an -example, see the [regex crate source], which when rendered can be viewed on -[docs.rs][regex-docs-rs]. If you have other documentation, such as a user -guide, consider adding the documentation there (for example, see [serde.rs]). -If you have a binary project, consider documenting the features in the README -or other documentation for the project (for example, see [sccache]). - -Clearly documenting the features can set expectations about features that are -considered "unstable" or otherwise shouldn't be used. For example, if there is -an optional dependency, but you don't want users to explicitly list that -optional dependency as a feature, exclude it from the documented list. - -Documentation published on [docs.rs] can use metadata in `Cargo.toml` to -control which features are enabled when the documentation is built. See -[docs.rs metadata documentation] for more details. - -> **Note**: Rustdoc has experimental support for annotating the documentation -> to indicate which features are required to use certain APIs. See the -> [`doc_cfg`] documentation for more details. An example is the [`syn` -> documentation], where you can see colored boxes which note which features -> are required to use it. - -[docs.rs metadata documentation]: https://docs.rs/about/metadata -[docs.rs]: https://docs.rs/ -[serde.rs]: https://serde.rs/feature-flags.html -[doc comments]: ../../rustdoc/how-to-write-documentation.html -[regex crate source]: https://github.com/rust-lang/regex/blob/1.4.2/src/lib.rs#L488-L583 -[regex-docs-rs]: https://docs.rs/regex/1.4.2/regex/#crate-features -[sccache]: https://github.com/mozilla/sccache/blob/0.2.13/README.md#build-requirements -[`doc_cfg`]: ../../unstable-book/language-features/doc-cfg.html -[`syn` documentation]: https://docs.rs/syn/1.0.54/syn/#modules - -#### Discovering features - -When features are documented in the library API, this can make it easier for -your users to discover which features are available and what they do. If the -feature documentation for a package isn't readily available, you can look at -the `Cargo.toml` file, but sometimes it can be hard to track it down. The -crate page on [crates.io] has a link to the source repository if available. -Tools like [`cargo vendor`] or [cargo-clone-crate] can be used to download the -source and inspect it. - -[`cargo vendor`]: ../commands/cargo-vendor.md -[cargo-clone-crate]: https://crates.io/crates/cargo-clone-crate diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/future-incompat-report.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/future-incompat-report.md deleted file mode 100644 index 5ec93618f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/future-incompat-report.md +++ /dev/null @@ -1,24 +0,0 @@ -### Future incompat report - -Cargo checks for future-incompatible warnings in all dependencies. 
These are warnings for -changes that may become hard errors in the future, causing the dependency to -stop building in a future version of rustc. If any warnings are found, a small -notice is displayed indicating that the warnings were found, and provides -instructions on how to display a full report. - -A full report can be displayed with the `cargo report future-incompatibilities ---id ID` command, or by running the build again with -the `--future-incompat-report` flag. The developer should then update their -dependencies to a version where the issue is fixed, or work with the -developers of the dependencies to help resolve the issue. - -This feature can be configured through a `[future-incompat-report]` -section in `.cargo/config`. Currently, the supported options are: - -``` -[future-incompat-report] -frequency = FREQUENCY -``` - -The supported values for `FREQUENCY` are 'always` and 'never', which control -whether or not a message is printed out at the end of `cargo build` / `cargo check`. diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/index.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/index.md deleted file mode 100644 index 298647a57..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/index.md +++ /dev/null @@ -1,25 +0,0 @@ -## Cargo Reference - -The reference covers the details of various areas of Cargo. - -* [Specifying Dependencies](specifying-dependencies.md) - * [Overriding Dependencies](overriding-dependencies.md) -* [The Manifest Format](manifest.md) - * [Cargo Targets](cargo-targets.md) -* [Workspaces](workspaces.md) -* [Features](features.md) - * [Features Examples](features-examples.md) -* [Profiles](profiles.md) -* [Configuration](config.md) -* [Environment Variables](environment-variables.md) -* [Build Scripts](build-scripts.md) - * [Build Script Examples](build-script-examples.md) -* [Publishing on crates.io](publishing.md) -* [Package ID Specifications](pkgid-spec.md) -* [Source Replacement](source-replacement.md) -* [External Tools](external-tools.md) -* [Registries](registries.md) -* [Dependency Resolution](resolver.md) -* [SemVer Compatibility](semver.md) -* [Future incompat report](future-incompat-report.md) -* [Unstable Features](unstable.md) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/manifest.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/manifest.md deleted file mode 100644 index 81ea65ce7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/manifest.md +++ /dev/null @@ -1,616 +0,0 @@ -## The Manifest Format - -The `Cargo.toml` file for each package is called its *manifest*. It is written -in the [TOML] format. Every manifest file consists of the following sections: - -* [`cargo-features`](unstable.md) โ€” Unstable, nightly-only features. -* [`[package]`](#the-package-section) โ€” Defines a package. - * [`name`](#the-name-field) โ€” The name of the package. - * [`version`](#the-version-field) โ€” The version of the package. - * [`authors`](#the-authors-field) โ€” The authors of the package. - * [`edition`](#the-edition-field) โ€” The Rust edition. - * [`rust-version`](#the-rust-version-field) โ€” The minimal supported Rust version. - * [`description`](#the-description-field) โ€” A description of the package. - * [`documentation`](#the-documentation-field) โ€” URL of the package documentation. - * [`readme`](#the-readme-field) โ€” Path to the package's README file. 
- * [`homepage`](#the-homepage-field) โ€” URL of the package homepage. - * [`repository`](#the-repository-field) โ€” URL of the package source repository. - * [`license`](#the-license-and-license-file-fields) โ€” The package license. - * [`license-file`](#the-license-and-license-file-fields) โ€” Path to the text of the license. - * [`keywords`](#the-keywords-field) โ€” Keywords for the package. - * [`categories`](#the-categories-field) โ€” Categories of the package. - * [`workspace`](#the-workspace-field) โ€” Path to the workspace for the package. - * [`build`](#the-build-field) โ€” Path to the package build script. - * [`links`](#the-links-field) โ€” Name of the native library the package links with. - * [`exclude`](#the-exclude-and-include-fields) โ€” Files to exclude when publishing. - * [`include`](#the-exclude-and-include-fields) โ€” Files to include when publishing. - * [`publish`](#the-publish-field) โ€” Can be used to prevent publishing the package. - * [`metadata`](#the-metadata-table) โ€” Extra settings for external tools. - * [`default-run`](#the-default-run-field) โ€” The default binary to run by [`cargo run`]. - * [`autobins`](cargo-targets.md#target-auto-discovery) โ€” Disables binary auto discovery. - * [`autoexamples`](cargo-targets.md#target-auto-discovery) โ€” Disables example auto discovery. - * [`autotests`](cargo-targets.md#target-auto-discovery) โ€” Disables test auto discovery. - * [`autobenches`](cargo-targets.md#target-auto-discovery) โ€” Disables bench auto discovery. - * [`resolver`](resolver.md#resolver-versions) โ€” Sets the dependency resolver to use. -* Target tables: (see [configuration](cargo-targets.md#configuring-a-target) for settings) - * [`[lib]`](cargo-targets.md#library) โ€” Library target settings. - * [`[[bin]]`](cargo-targets.md#binaries) โ€” Binary target settings. - * [`[[example]]`](cargo-targets.md#examples) โ€” Example target settings. - * [`[[test]]`](cargo-targets.md#tests) โ€” Test target settings. - * [`[[bench]]`](cargo-targets.md#benchmarks) โ€” Benchmark target settings. -* Dependency tables: - * [`[dependencies]`](specifying-dependencies.md) โ€” Package library dependencies. - * [`[dev-dependencies]`](specifying-dependencies.md#development-dependencies) โ€” Dependencies for examples, tests, and benchmarks. - * [`[build-dependencies]`](specifying-dependencies.md#build-dependencies) โ€” Dependencies for build scripts. - * [`[target]`](specifying-dependencies.md#platform-specific-dependencies) โ€” Platform-specific dependencies. -* [`[badges]`](#the-badges-section) โ€” Badges to display on a registry. -* [`[features]`](features.md) โ€” Conditional compilation features. -* [`[patch]`](overriding-dependencies.md#the-patch-section) โ€” Override dependencies. -* [`[replace]`](overriding-dependencies.md#the-replace-section) โ€” Override dependencies (deprecated). -* [`[profile]`](profiles.md) โ€” Compiler settings and optimizations. -* [`[workspace]`](workspaces.md) โ€” The workspace definition. - - -### The `[package]` section - -The first section in a `Cargo.toml` is `[package]`. - -```toml -[package] -name = "hello_world" # the name of the package -version = "0.1.0" # the current version, obeying semver -authors = ["Alice ", "Bob "] -``` - -The only fields required by Cargo are [`name`](#the-name-field) and -[`version`](#the-version-field). If publishing to a registry, the registry may -require additional fields. See the notes below and [the publishing -chapter][publishing] for requirements for publishing to [crates.io]. 
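As a hedged sketch with placeholder values, a manifest that is going to be published to crates.io typically also fills in the descriptive fields covered below, in addition to the required `name` and `version`:

```toml
[package]
name = "hello_world"
version = "0.1.0"
edition = "2021"
description = "A short plain-text summary shown on the registry."
license = "MIT OR Apache-2.0"
# Optional but commonly set; the URL is a placeholder.
repository = "https://github.com/example/hello_world"
```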
- -#### The `name` field - -The package name is an identifier used to refer to the package. It is used -when listed as a dependency in another package, and as the default name of -inferred lib and bin targets. - -The name must use only [alphanumeric] characters or `-` or `_`, and cannot be empty. -Note that [`cargo new`] and [`cargo init`] impose some additional restrictions on -the package name, such as enforcing that it is a valid Rust identifier and not -a keyword. [crates.io] imposes even more restrictions, such as -enforcing only ASCII characters, not a reserved name, not a special Windows -name such as "nul", is not too long, etc. - -[alphanumeric]: ../../std/primitive.char.html#method.is_alphanumeric - -#### The `version` field - -Cargo bakes in the concept of [Semantic -Versioning](https://semver.org/), so make sure you follow some basic rules: - -* Before you reach 1.0.0, anything goes, but if you make breaking changes, - increment the minor version. In Rust, breaking changes include adding fields to - structs or variants to enums. -* After 1.0.0, only make breaking changes when you increment the major version. - Donโ€™t break the build. -* After 1.0.0, donโ€™t add any new public API (no new `pub` anything) in patch-level - versions. Always increment the minor version if you add any new `pub` structs, - traits, fields, types, functions, methods or anything else. -* Use version numbers with three numeric parts such as 1.0.0 rather than 1.0. - -See the [Resolver] chapter for more information on how Cargo uses versions to -resolve dependencies, and for guidelines on setting your own version. See the -[SemVer compatibility] chapter for more details on exactly what constitutes a -breaking change. - -[Resolver]: resolver.md -[SemVer compatibility]: semver.md - - -#### The `authors` field - -The optional `authors` field lists people or organizations that are considered -the "authors" of the package. The exact meaning is open to interpretation โ€” it -may list the original or primary authors, current maintainers, or owners of the -package. An optional email address may be included within angled brackets at -the end of each author entry. - -This field is only surfaced in package metadata and in the `CARGO_PKG_AUTHORS` -environment variable within `build.rs`. It is not displayed in the [crates.io] -user interface. - -> **Warning**: Package manifests cannot be changed once published, so this -> field cannot be changed or removed in already-published versions of a -> package. - - -#### The `edition` field - -The `edition` key is an optional key that affects which [Rust Edition] your package -is compiled with. Setting the `edition` key in `[package]` will affect all -targets/crates in the package, including test suites, benchmarks, binaries, -examples, etc. - -```toml -[package] -# ... -edition = '2021' -``` - -Most manifests have the `edition` field filled in automatically by [`cargo new`] -with the latest stable edition. By default `cargo new` creates a manifest with -the 2021 edition currently. - -If the `edition` field is not present in `Cargo.toml`, then the 2015 edition is -assumed for backwards compatibility. Note that all manifests -created with [`cargo new`] will not use this historical fallback because they -will have `edition` explicitly specified to a newer value. - -#### The `rust-version` field - -The `rust-version` field is an optional key that tells cargo what version of the -Rust language and compiler your package can be compiled with. 
If the currently -selected version of the Rust compiler is older than the stated version, cargo -will exit with an error, telling the user what version is required. - -The first version of Cargo that supports this field was released with Rust 1.56.0. -In older releases, the field will be ignored, and Cargo will display a warning. - -```toml -[package] -# ... -rust-version = "1.56" -``` - -The Rust version must be a bare version number with two or three components; it -cannot include semver operators or pre-release identifiers. Compiler pre-release -identifiers such as -nightly will be ignored while checking the Rust version. -The `rust-version` must be equal to or newer than the version that first -introduced the configured `edition`. - -The `rust-version` may be ignored using the `--ignore-rust-version` option. - -Setting the `rust-version` key in `[package]` will affect all targets/crates in -the package, including test suites, benchmarks, binaries, examples, etc. - -#### The `description` field - -The description is a short blurb about the package. [crates.io] will display -this with your package. This should be plain text (not Markdown). - -```toml -[package] -# ... -description = "A short description of my package" -``` - -> **Note**: [crates.io] requires the `description` to be set. - - -#### The `documentation` field - -The `documentation` field specifies a URL to a website hosting the crate's -documentation. If no URL is specified in the manifest file, [crates.io] will -automatically link your crate to the corresponding [docs.rs] page. - -```toml -[package] -# ... -documentation = "https://docs.rs/bitflags" -``` - -#### The `readme` field - -The `readme` field should be the path to a file in the package root (relative -to this `Cargo.toml`) that contains general information about the package. -This file will be transferred to the registry when you publish. [crates.io] -will interpret it as Markdown and render it on the crate's page. - -```toml -[package] -# ... -readme = "README.md" -``` - -If no value is specified for this field, and a file named `README.md`, -`README.txt` or `README` exists in the package root, then the name of that -file will be used. You can suppress this behavior by setting this field to -`false`. If the field is set to `true`, a default value of `README.md` will -be assumed. - -#### The `homepage` field - -The `homepage` field should be a URL to a site that is the home page for your -package. - -```toml -[package] -# ... -homepage = "https://serde.rs/" -``` - -#### The `repository` field - -The `repository` field should be a URL to the source repository for your -package. - -```toml -[package] -# ... -repository = "https://github.com/rust-lang/cargo/" -``` - -#### The `license` and `license-file` fields - -The `license` field contains the name of the software license that the package -is released under. The `license-file` field contains the path to a file -containing the text of the license (relative to this `Cargo.toml`). - -[crates.io] interprets the `license` field as an [SPDX 2.1 license -expression][spdx-2.1-license-expressions]. The name must be a known license -from the [SPDX license list 3.11][spdx-license-list-3.11]. Parentheses are not -currently supported. See the [SPDX site] for more information. - -SPDX license expressions support AND and OR operators to combine multiple -licenses.[^slash] - -```toml -[package] -# ... -license = "MIT OR Apache-2.0" -``` - -Using `OR` indicates the user may choose either license. 
Using `AND` indicates -the user must comply with both licenses simultaneously. The `WITH` operator -indicates a license with a special exception. Some examples: - -* `MIT OR Apache-2.0` -* `LGPL-2.1-only AND MIT AND BSD-2-Clause` -* `GPL-2.0-or-later WITH Bison-exception-2.2` - -If a package is using a nonstandard license, then the `license-file` field may -be specified in lieu of the `license` field. - -```toml -[package] -# ... -license-file = "LICENSE.txt" -``` - -> **Note**: [crates.io] requires either `license` or `license-file` to be set. - -[^slash]: Previously multiple licenses could be separated with a `/`, but that -usage is deprecated. - -#### The `keywords` field - -The `keywords` field is an array of strings that describe this package. This -can help when searching for the package on a registry, and you may choose any -words that would help someone find this crate. - -```toml -[package] -# ... -keywords = ["gamedev", "graphics"] -``` - -> **Note**: [crates.io] has a maximum of 5 keywords. Each keyword must be -> ASCII text, start with a letter, and only contain letters, numbers, `_` or -> `-`, and have at most 20 characters. - -#### The `categories` field - -The `categories` field is an array of strings of the categories this package -belongs to. - -```toml -categories = ["command-line-utilities", "development-tools::cargo-plugins"] -``` - -> **Note**: [crates.io] has a maximum of 5 categories. Each category should -> match one of the strings available at , and -> must match exactly. - - -#### The `workspace` field - -The `workspace` field can be used to configure the workspace that this package -will be a member of. If not specified this will be inferred as the first -Cargo.toml with `[workspace]` upwards in the filesystem. Setting this is -useful if the member is not inside a subdirectory of the workspace root. - -```toml -[package] -# ... -workspace = "path/to/workspace/root" -``` - -This field cannot be specified if the manifest already has a `[workspace]` -table defined. That is, a crate cannot both be a root crate in a workspace -(contain `[workspace]`) and also be a member crate of another workspace -(contain `package.workspace`). - -For more information, see the [workspaces chapter](workspaces.md). - - - -#### The `build` field - -The `build` field specifies a file in the package root which is a [build -script] for building native code. More information can be found in the [build -script guide][build script]. - -[build script]: build-scripts.md - -```toml -[package] -# ... -build = "build.rs" -``` - -The default is `"build.rs"`, which loads the script from a file named -`build.rs` in the root of the package. Use `build = "custom_build_name.rs"` to -specify a path to a different file or `build = false` to disable automatic -detection of the build script. - - -#### The `links` field - -The `links` field specifies the name of a native library that is being linked -to. More information can be found in the [`links`][links] section of the build -script guide. - -[links]: build-scripts.md#the-links-manifest-key - -```toml -[package] -# ... -links = "foo" -``` - - -#### The `exclude` and `include` fields - -The `exclude` and `include` fields can be used to explicitly specify which -files are included when packaging a project to be [published][publishing], -and certain kinds of change tracking (described below). 
-The patterns specified in the `exclude` field identify a set of files that are -not included, and the patterns in `include` specify files that are explicitly -included. -You may run [`cargo package --list`][`cargo package`] to verify which files will -be included in the package. - -```toml -[package] -# ... -exclude = ["/ci", "images/", ".*"] -``` - -```toml -[package] -# ... -include = ["/src", "COPYRIGHT", "/examples", "!/examples/big_example"] -``` - -The default if neither field is specified is to include all files from the -root of the package, except for the exclusions listed below. - -If `include` is not specified, then the following files will be excluded: - -* If the package is not in a git repository, all "hidden" files starting with - a dot will be skipped. -* If the package is in a git repository, any files that are ignored by the - [gitignore] rules of the repository and global git configuration will be - skipped. - -Regardless of whether `exclude` or `include` is specified, the following files -are always excluded: - -* Any sub-packages will be skipped (any subdirectory that contains a - `Cargo.toml` file). -* A directory named `target` in the root of the package will be skipped. - -The following files are always included: - -* The `Cargo.toml` file of the package itself is always included, it does not - need to be listed in `include`. -* A minimized `Cargo.lock` is automatically included if the package contains a - binary or example target, see [`cargo package`] for more information. -* If a [`license-file`](#the-license-and-license-file-fields) is specified, it - is always included. - -The options are mutually exclusive; setting `include` will override an -`exclude`. If you need to have exclusions to a set of `include` files, use the -`!` operator described below. - -The patterns should be [gitignore]-style patterns. Briefly: - -- `foo` matches any file or directory with the name `foo` anywhere in the - package. This is equivalent to the pattern `**/foo`. -- `/foo` matches any file or directory with the name `foo` only in the root of - the package. -- `foo/` matches any *directory* with the name `foo` anywhere in the package. -- Common glob patterns like `*`, `?`, and `[]` are supported: - - `*` matches zero or more characters except `/`. For example, `*.html` - matches any file or directory with the `.html` extension anywhere in the - package. - - `?` matches any character except `/`. For example, `foo?` matches `food`, - but not `foo`. - - `[]` allows for matching a range of characters. For example, `[ab]` - matches either `a` or `b`. `[a-z]` matches letters a through z. -- `**/` prefix matches in any directory. For example, `**/foo/bar` matches the - file or directory `bar` anywhere that is directly under directory `foo`. -- `/**` suffix matches everything inside. For example, `foo/**` matches all - files inside directory `foo`, including all files in subdirectories below - `foo`. -- `/**/` matches zero or more directories. For example, `a/**/b` matches - `a/b`, `a/x/b`, `a/x/y/b`, and so on. -- `!` prefix negates a pattern. For example, a pattern of `src/*.rs` and - `!foo.rs` would match all files with the `.rs` extension inside the `src` - directory, except for any file named `foo.rs`. - -The include/exclude list is also used for change tracking in some situations. -For targets built with `rustdoc`, it is used to determine the list of files to -track to determine if the target should be rebuilt. 
If the package has a -[build script] that does not emit any `rerun-if-*` directives, then the -include/exclude list is used for tracking if the build script should be re-run -if any of those files change. - -[gitignore]: https://git-scm.com/docs/gitignore - - -#### The `publish` field - -The `publish` field can be used to prevent a package from being published to a -package registry (like *crates.io*) by mistake, for instance to keep a package -private in a company. - -```toml -[package] -# ... -publish = false -``` - -The value may also be an array of strings which are registry names that are -allowed to be published to. - -```toml -[package] -# ... -publish = ["some-registry-name"] -``` - -If publish array contains a single registry, `cargo publish` command will use -it when `--registry` flag is not specified. - - -#### The `metadata` table - -Cargo by default will warn about unused keys in `Cargo.toml` to assist in -detecting typos and such. The `package.metadata` table, however, is completely -ignored by Cargo and will not be warned about. This section can be used for -tools which would like to store package configuration in `Cargo.toml`. For -example: - -```toml -[package] -name = "..." -# ... - -# Metadata used when generating an Android APK, for example. -[package.metadata.android] -package-name = "my-awesome-android-app" -assets = "path/to/static" -``` - -There is a similar table at the workspace level at -[`workspace.metadata`][workspace-metadata]. While cargo does not specify a -format for the content of either of these tables, it is suggested that -external tools may wish to use them in a consistent fashion, such as referring -to the data in `workspace.metadata` if data is missing from `package.metadata`, -if that makes sense for the tool in question. - -[workspace-metadata]: workspaces.md#the-workspacemetadata-table - -#### The `default-run` field - -The `default-run` field in the `[package]` section of the manifest can be used -to specify a default binary picked by [`cargo run`]. For example, when there is -both `src/bin/a.rs` and `src/bin/b.rs`: - -```toml -[package] -default-run = "a" -``` - -### The `[badges]` section - -The `[badges]` section is for specifying status badges that can be displayed -on a registry website when the package is published. - -> Note: [crates.io] previously displayed badges next to a crate on its -> website, but that functionality has been removed. Packages should place -> badges in its README file which will be displayed on [crates.io] (see [the -> `readme` field](#the-readme-field)). - -```toml -[badges] -# The `maintenance` table indicates the status of the maintenance of -# the crate. This may be used by a registry, but is currently not -# used by crates.io. See https://github.com/rust-lang/crates.io/issues/2437 -# and https://github.com/rust-lang/crates.io/issues/2438 for more details. -# -# The `status` field is required. Available options are: -# - `actively-developed`: New features are being added and bugs are being fixed. -# - `passively-maintained`: There are no plans for new features, but the maintainer intends to -# respond to issues that get filed. -# - `as-is`: The crate is feature complete, the maintainer does not intend to continue working on -# it or providing support, but it works for the purposes it was designed for. -# - `experimental`: The author wants to share it with the community but is not intending to meet -# anyone's particular use case. 
-# - `looking-for-maintainer`: The current maintainer would like to transfer the crate to someone -# else. -# - `deprecated`: The maintainer does not recommend using this crate (the description of the crate -# can describe why, there could be a better solution available or there could be problems with -# the crate that the author does not want to fix). -# - `none`: Displays no badge on crates.io, since the maintainer has not chosen to specify -# their intentions, potential crate users will need to investigate on their own. -maintenance = { status = "..." } -``` - -### Dependency sections - -See the [specifying dependencies page](specifying-dependencies.md) for -information on the `[dependencies]`, `[dev-dependencies]`, -`[build-dependencies]`, and target-specific `[target.*.dependencies]` sections. - -### The `[profile.*]` sections - -The `[profile]` tables provide a way to customize compiler settings such as -optimizations and debug settings. See [the Profiles chapter](profiles.md) for -more detail. - - - -[`cargo init`]: ../commands/cargo-init.md -[`cargo new`]: ../commands/cargo-new.md -[`cargo package`]: ../commands/cargo-package.md -[`cargo run`]: ../commands/cargo-run.md -[crates.io]: https://crates.io/ -[docs.rs]: https://docs.rs/ -[publishing]: publishing.md -[Rust Edition]: ../../edition-guide/index.html -[spdx-2.1-license-expressions]: https://spdx.org/spdx-specification-21-web-version#h.jxpfx0ykyb60 -[spdx-license-list-3.11]: https://github.com/spdx/license-list-data/tree/v3.11 -[SPDX site]: https://spdx.org/license-list -[TOML]: https://toml.io/ - - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/overriding-dependencies.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/overriding-dependencies.md deleted file mode 100644 index 9dd402a1f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/overriding-dependencies.md +++ /dev/null @@ -1,345 +0,0 @@ -## Overriding Dependencies - -The desire to override a dependency can arise through a number of scenarios. -Most of them, however, boil down to the ability to work with a crate before -it's been published to [crates.io]. For example: - -* A crate you're working on is also used in a much larger application you're - working on, and you'd like to test a bug fix to the library inside of the - larger application. -* An upstream crate you don't work on has a new feature or a bug fix on the - master branch of its git repository which you'd like to test out. -* You're about to publish a new major version of your crate, but you'd like to - do integration testing across an entire package to ensure the new major - version works. -* You've submitted a fix to an upstream crate for a bug you found, but you'd - like to immediately have your application start depending on the fixed - version of the crate to avoid blocking on the bug fix getting merged. - -These scenarios can be solved with the [`[patch]` manifest -section](#the-patch-section). - -This chapter walks through a few different use cases, and includes details -on the different ways to override a dependency. 
- -* Example use cases - * [Testing a bugfix](#testing-a-bugfix) - * [Working with an unpublished minor version](#working-with-an-unpublished-minor-version) - * [Overriding repository URL](#overriding-repository-url) - * [Prepublishing a breaking change](#prepublishing-a-breaking-change) - * [Using `[patch]` with multiple versions](#using-patch-with-multiple-versions) -* Reference - * [The `[patch]` section](#the-patch-section) - * [The `[replace]` section](#the-replace-section) - * [`paths` overrides](#paths-overrides) - -> **Note**: See also specifying a dependency with [multiple locations], which -> can be used to override the source for a single dependency declaration in a -> local package. - -### Testing a bugfix - -Let's say you're working with the [`uuid` crate] but while you're working on it -you discover a bug. You are, however, quite enterprising so you decide to also -try to fix the bug! Originally your manifest will look like: - -[`uuid` crate]: https://crates.io/crates/uuid - -```toml -[package] -name = "my-library" -version = "0.1.0" - -[dependencies] -uuid = "1.0" -``` - -First thing we'll do is to clone the [`uuid` repository][uuid-repository] -locally via: - -```console -$ git clone https://github.com/uuid-rs/uuid -``` - -Next we'll edit the manifest of `my-library` to contain: - -```toml -[patch.crates-io] -uuid = { path = "../path/to/uuid" } -``` - -Here we declare that we're *patching* the source `crates-io` with a new -dependency. This will effectively add the local checked out version of `uuid` to -the crates.io registry for our local package. - -Next up we need to ensure that our lock file is updated to use this new version -of `uuid` so our package uses the locally checked out copy instead of one from -crates.io. The way `[patch]` works is that it'll load the dependency at -`../path/to/uuid` and then whenever crates.io is queried for versions of `uuid` -it'll *also* return the local version. - -This means that the version number of the local checkout is significant and will -affect whether the patch is used. Our manifest declared `uuid = "1.0"` which -means we'll only resolve to `>= 1.0.0, < 2.0.0`, and Cargo's greedy resolution -algorithm also means that we'll resolve to the maximum version within that -range. Typically this doesn't matter as the version of the git repository will -already be greater or match the maximum version published on crates.io, but it's -important to keep this in mind! - -In any case, typically all you need to do now is: - -```console -$ cargo build - Compiling uuid v1.0.0 (.../uuid) - Compiling my-library v0.1.0 (.../my-library) - Finished dev [unoptimized + debuginfo] target(s) in 0.32 secs -``` - -And that's it! You're now building with the local version of `uuid` (note the -path in parentheses in the build output). If you don't see the local path version getting -built then you may need to run `cargo update -p uuid --precise $version` where -`$version` is the version of the locally checked out copy of `uuid`. - -Once you've fixed the bug you originally found the next thing you'll want to do -is to likely submit that as a pull request to the `uuid` crate itself. Once -you've done this then you can also update the `[patch]` section. 
The listing -inside of `[patch]` is just like the `[dependencies]` section, so once your pull -request is merged you could change your `path` dependency to: - -```toml -[patch.crates-io] -uuid = { git = 'https://github.com/uuid-rs/uuid' } -``` - -[uuid-repository]: https://github.com/uuid-rs/uuid - -### Working with an unpublished minor version - -Let's now shift gears a bit from bug fixes to adding features. While working on -`my-library` you discover that a whole new feature is needed in the `uuid` -crate. You've implemented this feature, tested it locally above with `[patch]`, -and submitted a pull request. Let's go over how you continue to use and test it -before it's actually published. - -Let's also say that the current version of `uuid` on crates.io is `1.0.0`, but -since then the master branch of the git repository has updated to `1.0.1`. This -branch includes your new feature you submitted previously. To use this -repository we'll edit our `Cargo.toml` to look like - -```toml -[package] -name = "my-library" -version = "0.1.0" - -[dependencies] -uuid = "1.0.1" - -[patch.crates-io] -uuid = { git = 'https://github.com/uuid-rs/uuid' } -``` - -Note that our local dependency on `uuid` has been updated to `1.0.1` as it's -what we'll actually require once the crate is published. This version doesn't -exist on crates.io, though, so we provide it with the `[patch]` section of the -manifest. - -Now when our library is built it'll fetch `uuid` from the git repository and -resolve to 1.0.1 inside the repository instead of trying to download a version -from crates.io. Once 1.0.1 is published on crates.io the `[patch]` section can -be deleted. - -It's also worth noting that `[patch]` applies *transitively*. Let's say you use -`my-library` in a larger package, such as: - -```toml -[package] -name = "my-binary" -version = "0.1.0" - -[dependencies] -my-library = { git = 'https://example.com/git/my-library' } -uuid = "1.0" - -[patch.crates-io] -uuid = { git = 'https://github.com/uuid-rs/uuid' } -``` - -Remember that `[patch]` is applicable *transitively* but can only be defined at -the *top level* so we consumers of `my-library` have to repeat the `[patch]` section -if necessary. Here, though, the new `uuid` crate applies to *both* our dependency on -`uuid` and the `my-library -> uuid` dependency. The `uuid` crate will be resolved to -one version for this entire crate graph, 1.0.1, and it'll be pulled from the git -repository. - -#### Overriding repository URL - -In case the dependency you want to override isn't loaded from `crates.io`, -you'll have to change a bit how you use `[patch]`. For example, if the -dependency is a git dependency, you can override it to a local path with: - -```toml -[patch."https://github.com/your/repository"] -my-library = { path = "../my-library/path" } -``` - -And that's it! - -### Prepublishing a breaking change - -Let's take a look at working with a new major version of a crate, typically -accompanied with breaking changes. Sticking with our previous crates, this -means that we're going to be creating version 2.0.0 of the `uuid` crate. After -we've submitted all changes upstream we can update our manifest for -`my-library` to look like: - -```toml -[dependencies] -uuid = "2.0" - -[patch.crates-io] -uuid = { git = "https://github.com/uuid-rs/uuid", branch = "2.0.0" } -``` - -And that's it! Like with the previous example the 2.0.0 version doesn't actually -exist on crates.io but we can still put it in through a git dependency through -the usage of the `[patch]` section. 
As a thought exercise let's take another -look at the `my-binary` manifest from above again as well: - -```toml -[package] -name = "my-binary" -version = "0.1.0" - -[dependencies] -my-library = { git = 'https://example.com/git/my-library' } -uuid = "1.0" - -[patch.crates-io] -uuid = { git = 'https://github.com/uuid-rs/uuid', branch = '2.0.0' } -``` - -Note that this will actually resolve to two versions of the `uuid` crate. The -`my-binary` crate will continue to use the 1.x.y series of the `uuid` crate but -the `my-library` crate will use the `2.0.0` version of `uuid`. This will allow you -to gradually roll out breaking changes to a crate through a dependency graph -without being forced to update everything all at once. - -### Using `[patch]` with multiple versions - -You can patch in multiple versions of the same crate with the `package` key -used to rename dependencies. For example let's say that the `serde` crate has -a bugfix that we'd like to use to its `1.*` series but we'd also like to -prototype using a `2.0.0` version of serde we have in our git repository. To -configure this we'd do: - -```toml -[patch.crates-io] -serde = { git = 'https://github.com/serde-rs/serde' } -serde2 = { git = 'https://github.com/example/serde', package = 'serde', branch = 'v2' } -``` - -The first `serde = ...` directive indicates that serde `1.*` should be used -from the git repository (pulling in the bugfix we need) and the second `serde2 -= ...` directive indicates that the `serde` package should also be pulled from -the `v2` branch of `https://github.com/example/serde`. We're assuming here -that `Cargo.toml` on that branch mentions version `2.0.0`. - -Note that when using the `package` key the `serde2` identifier here is actually -ignored. We simply need a unique name which doesn't conflict with other patched -crates. - -### The `[patch]` section - -The `[patch]` section of `Cargo.toml` can be used to override dependencies -with other copies. The syntax is similar to the -[`[dependencies]`][dependencies] section: - -```toml -[patch.crates-io] -foo = { git = 'https://github.com/example/foo' } -bar = { path = 'my/local/bar' } - -[dependencies.baz] -git = 'https://github.com/example/baz' - -[patch.'https://github.com/example/baz'] -baz = { git = 'https://github.com/example/patched-baz', branch = 'my-branch' } -``` - -The `[patch]` table is made of dependency-like sub-tables. Each key after -`[patch]` is a URL of the source that is being patched, or the name of a -registry. The name `crates-io` may be used to override the default registry -[crates.io]. The first `[patch]` in the example above demonstrates overriding -[crates.io], and the second `[patch]` demonstrates overriding a git source. - -Each entry in these tables is a normal dependency specification, the same as -found in the `[dependencies]` section of the manifest. The dependencies listed -in the `[patch]` section are resolved and used to patch the source at the -URL specified. The above manifest snippet patches the `crates-io` source (e.g. -crates.io itself) with the `foo` crate and `bar` crate. It also -patches the `https://github.com/example/baz` source with a `my-branch` that -comes from elsewhere. - -Sources can be patched with versions of crates that do not exist, and they can -also be patched with versions of crates that already exist. If a source is -patched with a crate version that already exists in the source, then the -source's original crate is replaced. - -### The `[replace]` section - -> **Note**: `[replace]` is deprecated. 
You should use the -> [`[patch]`](#the-patch-section) table instead. - -This section of Cargo.toml can be used to override dependencies with other -copies. The syntax is similar to the `[dependencies]` section: - -```toml -[replace] -"foo:0.1.0" = { git = 'https://github.com/example/foo' } -"bar:1.0.2" = { path = 'my/local/bar' } -``` - -Each key in the `[replace]` table is a [package ID -specification](pkgid-spec.md), which allows arbitrarily choosing a node in the -dependency graph to override (the 3-part version number is required). The -value of each key is the same as the `[dependencies]` syntax for specifying -dependencies, except that you can't specify features. Note that when a crate -is overridden the copy it's overridden with must have both the same name and -version, but it can come from a different source (e.g., git or a local path). - -### `paths` overrides - -Sometimes you're only temporarily working on a crate and you don't want to have -to modify `Cargo.toml` like with the `[patch]` section above. For this use -case Cargo offers a much more limited version of overrides called **path -overrides**. - -Path overrides are specified through [`.cargo/config.toml`](config.md) instead of -`Cargo.toml`. Inside of `.cargo/config.toml` you'll specify a key called `paths`: - -```toml -paths = ["/path/to/uuid"] -``` - -This array should be filled with directories that contain a `Cargo.toml`. In -this instance, weโ€™re just adding `uuid`, so it will be the only one thatโ€™s -overridden. This path can be either absolute or relative to the directory that -contains the `.cargo` folder. - -Path overrides are more restricted than the `[patch]` section, however, in -that they cannot change the structure of the dependency graph. When a -path replacement is used then the previous set of dependencies -must all match exactly to the new `Cargo.toml` specification. For example this -means that path overrides cannot be used to test out adding a dependency to a -crate, instead `[patch]` must be used in that situation. As a result usage of a -path override is typically isolated to quick bug fixes rather than larger -changes. - -Note: using a local configuration to override paths will only work for crates -that have been published to [crates.io]. You cannot use this feature to tell -Cargo how to find local unpublished crates. - - -[crates.io]: https://crates.io/ -[multiple locations]: specifying-dependencies.md#multiple-locations -[dependencies]: specifying-dependencies.md diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/pkgid-spec.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/pkgid-spec.md deleted file mode 100644 index 7ac32bf0e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/pkgid-spec.md +++ /dev/null @@ -1,67 +0,0 @@ -## Package ID Specifications - -### Package ID specifications - -Subcommands of Cargo frequently need to refer to a particular package within a -dependency graph for various operations like updating, cleaning, building, etc. -To solve this problem, Cargo supports *Package ID Specifications*. A specification -is a string which is used to uniquely refer to one package within a graph of -packages. - -The specification may be fully qualified, such as -`https://github.com/rust-lang/crates.io-index#regex:1.4.3` or it may be -abbreviated, such as `regex`. The abbreviated form may be used as long as it -uniquely identifies a single package in the dependency graph. 
If there is -ambiguity, additional qualifiers can be added to make it unique. For example, -if there are two versions of the `regex` package in the graph, then it can be -qualified with a version to make it unique, such as `regex:1.4.3`. - -#### Specification grammar - -The formal grammar for a Package Id Specification is: - -```notrust -spec := pkgname - | proto "://" hostname-and-path [ "#" ( pkgname | semver ) ] -pkgname := name [ ":" semver ] - -proto := "http" | "git" | ... -``` - -Here, brackets indicate that the contents are optional. - -The URL form can be used for git dependencies, or to differentiate packages -that come from different sources such as different registries. - -#### Example specifications - -The following are references to the `regex` package on `crates.io`: - -| Spec | Name | Version | -|:------------------------------------------------------------|:-------:|:-------:| -| `regex` | `regex` | `*` | -| `regex:1.4.3` | `regex` | `1.4.3` | -| `https://github.com/rust-lang/crates.io-index#regex` | `regex` | `*` | -| `https://github.com/rust-lang/crates.io-index#regex:1.4.3` | `regex` | `1.4.3` | - -The following are some examples of specs for several different git dependencies: - -| Spec | Name | Version | -|:----------------------------------------------------------|:----------------:|:--------:| -| `https://github.com/rust-lang/cargo#0.52.0` | `cargo` | `0.52.0` | -| `https://github.com/rust-lang/cargo#cargo-platform:0.1.2` | `cargo-platform` | `0.1.2` | -| `ssh://git@github.com/rust-lang/regex.git#regex:1.4.3` | `regex` | `1.4.3` | - -Local packages on the filesystem can use `file://` URLs to reference them: - -| Spec | Name | Version | -|:---------------------------------------|:-----:|:-------:| -| `file:///path/to/my/project/foo` | `foo` | `*` | -| `file:///path/to/my/project/foo#1.1.8` | `foo` | `1.1.8` | - -#### Brevity of specifications - -The goal of this is to enable both succinct and exhaustive syntaxes for -referring to packages in a dependency graph. Ambiguous references may refer to -one or more packages. Most commands generate an error if more than one package -could be referred to with the same specification. diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/profiles.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/profiles.md deleted file mode 100644 index 6885f04ff..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/profiles.md +++ /dev/null @@ -1,460 +0,0 @@ -## Profiles - -Profiles provide a way to alter the compiler settings, influencing things like -optimizations and debugging symbols. - -Cargo has 4 built-in profiles: `dev`, `release`, `test`, and `bench`. The -profile is automatically chosen based on which command is being run if a -profile is not specified on the command-line. In addition to the built-in -profiles, custom user-defined profiles can also be specified. - -Profile settings can be changed in [`Cargo.toml`](manifest.md) with the -`[profile]` table. Within each named profile, individual settings can be changed -with key/value pairs like this: - -```toml -[profile.dev] -opt-level = 1 # Use slightly better optimizations. -overflow-checks = false # Disable integer overflow checks. -``` - -Cargo only looks at the profile settings in the `Cargo.toml` manifest at the -root of the workspace. Profile settings defined in dependencies will be -ignored. - -Additionally, profiles can be overridden from a [config] definition. 
-Specifying a profile in a config file or environment variable will override -the settings from `Cargo.toml`. - -[config]: config.md - -### Profile settings - -The following is a list of settings that can be controlled in a profile. - -#### opt-level - -The `opt-level` setting controls the [`-C opt-level` flag] which controls the level -of optimization. Higher optimization levels may produce faster runtime code at -the expense of longer compiler times. Higher levels may also change and -rearrange the compiled code which may make it harder to use with a debugger. - -The valid options are: - -* `0`: no optimizations -* `1`: basic optimizations -* `2`: some optimizations -* `3`: all optimizations -* `"s"`: optimize for binary size -* `"z"`: optimize for binary size, but also turn off loop vectorization. - -It is recommended to experiment with different levels to find the right -balance for your project. There may be surprising results, such as level `3` -being slower than `2`, or the `"s"` and `"z"` levels not being necessarily -smaller. You may also want to reevaluate your settings over time as newer -versions of `rustc` changes optimization behavior. - -See also [Profile Guided Optimization] for more advanced optimization -techniques. - -[`-C opt-level` flag]: ../../rustc/codegen-options/index.html#opt-level -[Profile Guided Optimization]: ../../rustc/profile-guided-optimization.html - -#### debug - -The `debug` setting controls the [`-C debuginfo` flag] which controls the -amount of debug information included in the compiled binary. - -The valid options are: - -* `0` or `false`: no debug info at all -* `1`: line tables only -* `2` or `true`: full debug info - -You may wish to also configure the [`split-debuginfo`](#split-debuginfo) option -depending on your needs as well. - -[`-C debuginfo` flag]: ../../rustc/codegen-options/index.html#debuginfo - -#### split-debuginfo - -The `split-debuginfo` setting controls the [`-C split-debuginfo` flag] which -controls whether debug information, if generated, is either placed in the -executable itself or adjacent to it. - -This option is a string and acceptable values are the same as those the -[compiler accepts][`-C split-debuginfo` flag]. The default value for this option -is `unpacked` on macOS for profiles that have debug information otherwise -enabled. Otherwise the default for this option is [documented with rustc][`-C -split-debuginfo` flag] and is platform-specific. Some options are only -available on the [nightly channel]. The Cargo default may change in the future -once more testing has been performed, and support for DWARF is stabilized. - -[nightly channel]: ../../book/appendix-07-nightly-rust.html -[`-C split-debuginfo` flag]: ../../rustc/codegen-options/index.html#split-debuginfo - -#### strip - -The `strip` option controls the [`-C strip` flag], which directs rustc to -strip either symbols or debuginfo from a binary. This can be enabled like so: - -```toml -[package] -# ... - -[profile.release] -strip = "debuginfo" -``` - -Possible string values of `strip` are `"none"`, `"debuginfo"`, and `"symbols"`. -The default is `"none"`. - -You can also configure this option with the boolean values `true` or `false`. -`strip = true` is equivalent to `strip = "symbols"`. `strip = false` is -equivalent to `strip = "none"` and disables `strip` completely. 
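As a small illustration of the `debug` levels described above, the following is a sketch (not one of the original profile examples) of a release profile that keeps line tables so that backtraces still resolve to source locations:

```toml
[profile.release]
# Line tables only: smaller than full debug info (`2`/`true`),
# but enough for readable backtraces.
debug = 1
```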
- -[`-C strip` flag]: ../../rustc/codegen-options/index.html#strip - -#### debug-assertions - -The `debug-assertions` setting controls the [`-C debug-assertions` flag] which -turns `cfg(debug_assertions)` [conditional compilation] on or off. Debug -assertions are intended to include runtime validation which is only available -in debug/development builds. These may be things that are too expensive or -otherwise undesirable in a release build. Debug assertions enables the -[`debug_assert!` macro] in the standard library. - -The valid options are: - -* `true`: enabled -* `false`: disabled - -[`-C debug-assertions` flag]: ../../rustc/codegen-options/index.html#debug-assertions -[conditional compilation]: ../../reference/conditional-compilation.md#debug_assertions -[`debug_assert!` macro]: ../../std/macro.debug_assert.html - -#### overflow-checks - -The `overflow-checks` setting controls the [`-C overflow-checks` flag] which -controls the behavior of [runtime integer overflow]. When overflow-checks are -enabled, a panic will occur on overflow. - -The valid options are: - -* `true`: enabled -* `false`: disabled - -[`-C overflow-checks` flag]: ../../rustc/codegen-options/index.html#overflow-checks -[runtime integer overflow]: ../../reference/expressions/operator-expr.md#overflow - -#### lto - -The `lto` setting controls the [`-C lto` flag] which controls LLVM's [link -time optimizations]. LTO can produce better optimized code, using -whole-program analysis, at the cost of longer linking time. - -The valid options are: - -* `false`: Performs "thin local LTO" which performs "thin" LTO on the local - crate only across its [codegen units](#codegen-units). No LTO is performed - if codegen units is 1 or [opt-level](#opt-level) is 0. -* `true` or `"fat"`: Performs "fat" LTO which attempts to perform - optimizations across all crates within the dependency graph. -* `"thin"`: Performs ["thin" LTO]. This is similar to "fat", but takes - substantially less time to run while still achieving performance gains - similar to "fat". -* `"off"`: Disables LTO. - -See also the [`-C linker-plugin-lto`] `rustc` flag for cross-language LTO. - -[`-C lto` flag]: ../../rustc/codegen-options/index.html#lto -[link time optimizations]: https://llvm.org/docs/LinkTimeOptimization.html -[`-C linker-plugin-lto`]: ../../rustc/codegen-options/index.html#linker-plugin-lto -["thin" LTO]: http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html - -#### panic - -The `panic` setting controls the [`-C panic` flag] which controls which panic -strategy to use. - -The valid options are: - -* `"unwind"`: Unwind the stack upon panic. -* `"abort"`: Terminate the process upon panic. - -When set to `"unwind"`, the actual value depends on the default of the target -platform. For example, the NVPTX platform does not support unwinding, so it -always uses `"abort"`. - -Tests, benchmarks, build scripts, and proc macros ignore the `panic` setting. -The `rustc` test harness currently requires `unwind` behavior. See the -[`panic-abort-tests`] unstable flag which enables `abort` behavior. - -Additionally, when using the `abort` strategy and building a test, all of the -dependencies will also be forced to build with the `unwind` strategy. - -[`-C panic` flag]: ../../rustc/codegen-options/index.html#panic -[`panic-abort-tests`]: unstable.md#panic-abort-tests - -#### incremental - -The `incremental` setting controls the [`-C incremental` flag] which controls -whether or not incremental compilation is enabled. 
Incremental compilation -causes `rustc` to save additional information to disk which will be reused -when recompiling the crate, improving re-compile times. The additional -information is stored in the `target` directory. - -The valid options are: - -* `true`: enabled -* `false`: disabled - -Incremental compilation is only used for workspace members and "path" -dependencies. - -The incremental value can be overridden globally with the `CARGO_INCREMENTAL` -[environment variable] or the [`build.incremental`] config variable. - -[`-C incremental` flag]: ../../rustc/codegen-options/index.html#incremental -[environment variable]: environment-variables.md -[`build.incremental`]: config.md#buildincremental - -#### codegen-units - -The `codegen-units` setting controls the [`-C codegen-units` flag] which -controls how many "code generation units" a crate will be split into. More -code generation units allows more of a crate to be processed in parallel -possibly reducing compile time, but may produce slower code. - -This option takes an integer greater than 0. - -The default is 256 for [incremental](#incremental) builds, and 16 for -non-incremental builds. - -[`-C codegen-units` flag]: ../../rustc/codegen-options/index.html#codegen-units - -#### rpath - -The `rpath` setting controls the [`-C rpath` flag] which controls -whether or not [`rpath`] is enabled. - -[`-C rpath` flag]: ../../rustc/codegen-options/index.html#rpath -[`rpath`]: https://en.wikipedia.org/wiki/Rpath - -### Default profiles - -#### dev - -The `dev` profile is used for normal development and debugging. It is the -default for build commands like [`cargo build`]. - -The default settings for the `dev` profile are: - -```toml -[profile.dev] -opt-level = 0 -debug = true -split-debuginfo = '...' # Platform-specific. -debug-assertions = true -overflow-checks = true -lto = false -panic = 'unwind' -incremental = true -codegen-units = 256 -rpath = false -``` - -#### release - -The `release` profile is intended for optimized artifacts used for releases -and in production. This profile is used when the `--release` flag is used, and -is the default for [`cargo install`]. - -The default settings for the `release` profile are: - -```toml -[profile.release] -opt-level = 3 -debug = false -split-debuginfo = '...' # Platform-specific. -debug-assertions = false -overflow-checks = false -lto = false -panic = 'unwind' -incremental = false -codegen-units = 16 -rpath = false -``` - -#### test - -The `test` profile is the default profile used by [`cargo test`]. -The `test` profile inherits the settings from the [`dev`](#dev) profile. - -#### bench - -The `bench` profile is the default profile used by [`cargo bench`]. -The `bench` profile inherits the settings from the [`release`](#release) profile. - -#### Build Dependencies - -All profiles, by default, do not optimize build dependencies (build scripts, -proc macros, and their dependencies). The default settings for build overrides -are: - -```toml -[profile.dev.build-override] -opt-level = 0 -codegen-units = 256 - -[profile.release.build-override] -opt-level = 0 -codegen-units = 256 -``` - -Build dependencies otherwise inherit settings from the active profile in use, as -described in [Profile selection](#profile-selection). - -### Custom profiles - -In addition to the built-in profiles, additional custom profiles can be -defined. These may be useful for setting up multiple workflows and build -modes. 
When defining a custom profile, you must specify the `inherits` key to -specify which profile the custom profile inherits settings from when the -setting is not specified. - -For example, let's say you want to compare a normal release build with a -release build with [LTO](#lto) optimizations, you can specify something like -the following in `Cargo.toml`: - -```toml -[profile.release-lto] -inherits = "release" -lto = true -``` - -The `--profile` flag can then be used to choose this custom profile: - -```console -cargo build --profile release-lto -``` - -The output for each profile will be placed in a directory of the same name -as the profile in the [`target` directory]. As in the example above, the -output would go into the `target/release-lto` directory. - -[`target` directory]: ../guide/build-cache.md - -### Profile selection - -The profile used depends on the command, the command-line flags like -`--release` or `--profile`, and the package (in the case of -[overrides](#overrides)). The default profile if none is specified is: - -| Command | Default Profile | -|---------|-----------------| -| [`cargo run`], [`cargo build`],
[`cargo check`], [`cargo rustc`] | [`dev` profile](#dev) | -| [`cargo test`] | [`test` profile](#test) -| [`cargo bench`] | [`bench` profile](#bench) -| [`cargo install`] | [`release` profile](#release) - -You can switch to a different profile using the `--profile=NAME` option which will used the given profile. -The `--release` flag is equivalent to `--profile=release`. - -The selected profile applies to all Cargo targets, -including [library](./cargo-targets.md#library), -[binary](./cargo-targets.md#binaries), -[example](./cargo-targets.md#examples), -[test](./cargo-targets.md#tests), -and [benchmark](./cargo-targets.md#benchmarks). - -The profile for specific packages can be specified with -[overrides](#overrides), described below. - -[`cargo bench`]: ../commands/cargo-bench.md -[`cargo build`]: ../commands/cargo-build.md -[`cargo check`]: ../commands/cargo-check.md -[`cargo install`]: ../commands/cargo-install.md -[`cargo run`]: ../commands/cargo-run.md -[`cargo rustc`]: ../commands/cargo-rustc.md -[`cargo test`]: ../commands/cargo-test.md - -### Overrides - -Profile settings can be overridden for specific packages and build-time -crates. To override the settings for a specific package, use the `package` -table to change the settings for the named package: - -```toml -# The `foo` package will use the -Copt-level=3 flag. -[profile.dev.package.foo] -opt-level = 3 -``` - -The package name is actually a [Package ID Spec](pkgid-spec.md), so you can -target individual versions of a package with syntax such as -`[profile.dev.package."foo:2.1.0"]`. - -To override the settings for all dependencies (but not any workspace member), -use the `"*"` package name: - -```toml -# Set the default for dependencies. -[profile.dev.package."*"] -opt-level = 2 -``` - -To override the settings for build scripts, proc macros, and their -dependencies, use the `build-override` table: - -```toml -# Set the settings for build scripts and proc-macros. -[profile.dev.build-override] -opt-level = 3 -``` - -> Note: When a dependency is both a normal dependency and a build dependency, -> Cargo will try to only build it once when `--target` is not specified. When -> using `build-override`, the dependency may need to be built twice, once as a -> normal dependency and once with the overridden build settings. This may -> increase initial build times. - -The precedence for which value is used is done in the following order (first -match wins): - -1. `[profile.dev.package.name]` โ€” A named package. -2. `[profile.dev.package."*"]` โ€” For any non-workspace member. -3. `[profile.dev.build-override]` โ€” Only for build scripts, proc macros, and - their dependencies. -4. `[profile.dev]` โ€” Settings in `Cargo.toml`. -5. Default values built-in to Cargo. - -Overrides cannot specify the `panic`, `lto`, or `rpath` settings. - -#### Overrides and generics - -The location where generic code is instantiated will influence the -optimization settings used for that generic code. This can cause subtle -interactions when using profile overrides to change the optimization level of -a specific crate. If you attempt to raise the optimization level of a -dependency which defines generic functions, those generic functions may not be -optimized when used in your local crate. This is because the code may be -generated in the crate where it is instantiated, and thus may use the -optimization settings of that crate. - -For example, [nalgebra] is a library which defines vectors and matrices making -heavy use of generic parameters. 
If your local code defines concrete nalgebra -types like `Vector4` and uses their methods, the corresponding nalgebra -code will be instantiated and built within your crate. Thus, if you attempt to -increase the optimization level of `nalgebra` using a profile override, it may -not result in faster performance. - -Further complicating the issue, `rustc` has some optimizations where it will -attempt to share monomorphized generics between crates. If the opt-level is 2 -or 3, then a crate will not use monomorphized generics from other crates, nor -will it export locally defined monomorphized items to be shared with other -crates. When experimenting with optimizing dependencies for development, -consider trying opt-level 1, which will apply some optimizations while still -allowing monomorphized items to be shared. - -[nalgebra]: https://crates.io/crates/nalgebra diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/publishing.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/publishing.md deleted file mode 100644 index 05f706b84..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/publishing.md +++ /dev/null @@ -1,273 +0,0 @@ -## Publishing on crates.io - -Once you've got a library that you'd like to share with the world, it's time to -publish it on [crates.io]! Publishing a crate is when a specific -version is uploaded to be hosted on [crates.io]. - -Take care when publishing a crate, because a publish is **permanent**. The -version can never be overwritten, and the code cannot be deleted. There is no -limit to the number of versions which can be published, however. - -### Before your first publish - -First things first, youโ€™ll need an account on [crates.io] to acquire -an API token. To do so, [visit the home page][crates.io] and log in via a GitHub -account (required for now). After this, visit your [Account -Settings](https://crates.io/me) page and run the [`cargo login`] command -specified. - -```console -$ cargo login abcdefghijklmnopqrstuvwxyz012345 -``` - -This command will inform Cargo of your API token and store it locally in your -`~/.cargo/credentials.toml`. Note that this token is a **secret** and should not be -shared with anyone else. If it leaks for any reason, you should revoke it -immediately. - -### Before publishing a new crate - -Keep in mind that crate names on [crates.io] are allocated on a first-come-first- -serve basis. Once a crate name is taken, it cannot be used for another crate. - -Check out the [metadata you can specify](manifest.md) in `Cargo.toml` to -ensure your crate can be discovered more easily! Before publishing, make sure -you have filled out the following fields: - -- [`license` or `license-file`] -- [`description`] -- [`homepage`] -- [`documentation`] -- [`repository`] -- [`readme`] - -It would also be a good idea to include some [`keywords`] and [`categories`], -though they are not required. - -If you are publishing a library, you may also want to consult the [Rust API -Guidelines]. - -#### Packaging a crate - -The next step is to package up your crate and upload it to [crates.io]. For -this weโ€™ll use the [`cargo publish`] subcommand. This command performs the following -steps: - -1. Perform some verification checks on your package. -2. Compress your source code into a `.crate` file. -3. Extract the `.crate` file into a temporary directory and verify that it - compiles. -4. Upload the `.crate` file to [crates.io]. -5. 
The registry will perform some additional checks on the uploaded package - before adding it. - -It is recommended that you first run `cargo publish --dry-run` (or [`cargo -package`] which is equivalent) to ensure there aren't any warnings or errors -before publishing. This will perform the first three steps listed above. - -```console -$ cargo publish --dry-run -``` - -You can inspect the generated `.crate` file in the `target/package` directory. -[crates.io] currently has a 10MB size limit on the `.crate` file. You may want -to check the size of the `.crate` file to ensure you didn't accidentally -package up large assets that are not required to build your package, such as -test data, website documentation, or code generation. You can check which -files are included with the following command: - -```console -$ cargo package --list -``` - -Cargo will automatically ignore files ignored by your version control system -when packaging, but if you want to specify an extra set of files to ignore you -can use the [`exclude` key](manifest.md#the-exclude-and-include-fields) in the -manifest: - -```toml -[package] -# ... -exclude = [ - "public/assets/*", - "videos/*", -] -``` - -If youโ€™d rather explicitly list the files to include, Cargo also supports an -`include` key, which if set, overrides the `exclude` key: - -```toml -[package] -# ... -include = [ - "**/*.rs", - "Cargo.toml", -] -``` - -### Uploading the crate - -When you are ready to publish, use the [`cargo publish`] command -to upload to [crates.io]: - -```console -$ cargo publish -``` - -And thatโ€™s it, youโ€™ve now published your first crate! - -### Publishing a new version of an existing crate - -In order to release a new version, change the `version` value specified in -your `Cargo.toml` manifest. Keep in mind [the semver -rules](manifest.md#the-version-field), and consult [RFC 1105] for -what constitutes a semver-breaking change. Then run [`cargo publish`] as -described above to upload the new version. - -### Managing a crates.io-based crate - -Management of crates is primarily done through the command line `cargo` tool -rather than the [crates.io] web interface. For this, there are a few subcommands -to manage a crate. - -#### `cargo yank` - -Occasions may arise where you publish a version of a crate that actually ends up -being broken for one reason or another (syntax error, forgot to include a file, -etc.). For situations such as this, Cargo supports a โ€œyankโ€ of a version of a -crate. - -```console -$ cargo yank --vers 1.0.1 -$ cargo yank --vers 1.0.1 --undo -``` - -A yank **does not** delete any code. This feature is not intended for deleting -accidentally uploaded secrets, for example. If that happens, you must reset -those secrets immediately. - -The semantics of a yanked version are that no new dependencies can be created -against that version, but all existing dependencies continue to work. One of the -major goals of [crates.io] is to act as a permanent archive of crates that does -not change over time, and allowing deletion of a version would go against this -goal. Essentially a yank means that all packages with a `Cargo.lock` will not -break, while any future `Cargo.lock` files generated will not list the yanked -version. - -#### `cargo owner` - -A crate is often developed by more than one person, or the primary maintainer -may change over time! The owner of a crate is the only person allowed to publish -new versions of the crate, but an owner may designate additional owners. 
- -```console -$ cargo owner --add github-handle -$ cargo owner --remove github-handle -$ cargo owner --add github:rust-lang:owners -$ cargo owner --remove github:rust-lang:owners -``` - -The owner IDs given to these commands must be GitHub user names or GitHub teams. - -If a user name is given to `--add`, that user is invited as a โ€œnamedโ€ owner, with -full rights to the crate. In addition to being able to publish or yank versions -of the crate, they have the ability to add or remove owners, *including* the -owner that made *them* an owner. Needless to say, you shouldnโ€™t make people you -donโ€™t fully trust into a named owner. In order to become a named owner, a user -must have logged into [crates.io] previously. - -If a team name is given to `--add`, that team is invited as a โ€œteamโ€ owner, with -restricted right to the crate. While they have permission to publish or yank -versions of the crate, they *do not* have the ability to add or remove owners. -In addition to being more convenient for managing groups of owners, teams are -just a bit more secure against owners becoming malicious. - -The syntax for teams is currently `github:org:team` (see examples above). -In order to invite a team as an owner one must be a member of that team. No -such restriction applies to removing a team as an owner. - -### GitHub permissions - -Team membership is not something GitHub provides simple public access to, and it -is likely for you to encounter the following message when working with them: - -> It looks like you donโ€™t have permission to query a necessary property from -GitHub to complete this request. You may need to re-authenticate on [crates.io] -to grant permission to read GitHub org memberships. - -This is basically a catch-all for โ€œyou tried to query a team, and one of the -five levels of membership access control denied thisโ€. That is not an -exaggeration. GitHubโ€™s support for team access control is Enterprise Grade. - -The most likely cause of this is simply that you last logged in before this -feature was added. We originally requested *no* permissions from GitHub when -authenticating users, because we didnโ€™t actually ever use the userโ€™s token for -anything other than logging them in. However to query team membership on your -behalf, we now require [the `read:org` scope][oauth-scopes]. - -You are free to deny us this scope, and everything that worked before teams -were introduced will keep working. However you will never be able to add a team -as an owner, or publish a crate as a team owner. If you ever attempt to do this, -you will get the error above. You may also see this error if you ever try to -publish a crate that you donโ€™t own at all, but otherwise happens to have a team. - -If you ever change your mind, or just arenโ€™t sure if [crates.io] has sufficient -permission, you can always go to and re-authenticate, -which will prompt you for permission if [crates.io] doesnโ€™t have all the scopes -it would like to. - -An additional barrier to querying GitHub is that the organization may be -actively denying third party access. To check this, you can go to: - -```text -https://github.com/organizations/:org/settings/oauth_application_policy -``` - -where `:org` is the name of the organization (e.g., `rust-lang`). 
You may see -something like: - -![Organization Access Control](../images/org-level-acl.png) - -Where you may choose to explicitly remove [crates.io] from your organizationโ€™s -blacklist, or simply press the โ€œRemove Restrictionsโ€ button to allow all third -party applications to access this data. - -Alternatively, when [crates.io] requested the `read:org` scope, you could have -explicitly whitelisted [crates.io] querying the org in question by pressing -the โ€œGrant Accessโ€ button next to its name: - -![Authentication Access Control](../images/auth-level-acl.png) - -#### Troubleshooting GitHub team access errors - -When trying to add a GitHub team as crate owner, you may see an error like: - -```text -error: failed to invite owners to crate : api errors (status 200 OK): could not find the github team org/repo -``` -In that case, you should go to [the GitHub Application settings page] and -check if crates.io is listed in the `Authorized OAuth Apps` tab. -If it isn't, you should go to and authorize it. -Then go back to the Application Settings page on GitHub, click on the -crates.io application in the list, and make sure you or your organization is -listed in the "Organization access" list with a green check mark. If there's -a button labeled `Grant` or `Request`, you should grant the access or -request the org owner to do so. - -[RFC 1105]: https://github.com/rust-lang/rfcs/blob/master/text/1105-api-evolution.md -[Rust API Guidelines]: https://rust-lang.github.io/api-guidelines/ -[`cargo login`]: ../commands/cargo-login.md -[`cargo package`]: ../commands/cargo-package.md -[`cargo publish`]: ../commands/cargo-publish.md -[`categories`]: manifest.md#the-categories-field -[`description`]: manifest.md#the-description-field -[`documentation`]: manifest.md#the-documentation-field -[`homepage`]: manifest.md#the-homepage-field -[`keywords`]: manifest.md#the-keywords-field -[`license` or `license-file`]: manifest.md#the-license-and-license-file-fields -[`readme`]: manifest.md#the-readme-field -[`repository`]: manifest.md#the-repository-field -[crates.io]: https://crates.io/ -[oauth-scopes]: https://developer.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps/ -[the GitHub Application settings page]: https://github.com/settings/applications diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/registries.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/registries.md deleted file mode 100644 index ef23caeee..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/registries.md +++ /dev/null @@ -1,625 +0,0 @@ -## Registries - -Cargo installs crates and fetches dependencies from a "registry". The default -registry is [crates.io]. A registry contains an "index" which contains a -searchable list of available crates. A registry may also provide a web API to -support publishing new crates directly from Cargo. - -> Note: If you are interested in mirroring or vendoring an existing registry, -> take a look at [Source Replacement]. - -### Using an Alternate Registry - -To use a registry other than [crates.io], the name and index URL of the -registry must be added to a [`.cargo/config.toml` file][config]. The `registries` -table has a key for each registry, for example: - -```toml -[registries] -my-registry = { index = "https://my-intranet:8080/git/index" } -``` - -The `index` key should be a URL to a git repository with the registry's index. 
-A crate can then depend on a crate from another registry by specifying the -`registry` key and a value of the registry's name in that dependency's entry -in `Cargo.toml`: - -```toml -# Sample Cargo.toml -[package] -name = "my-project" -version = "0.1.0" - -[dependencies] -other-crate = { version = "1.0", registry = "my-registry" } -``` - -As with most config values, the index may be specified with an environment -variable instead of a config file. For example, setting the following -environment variable will accomplish the same thing as defining a config file: - -```ignore -CARGO_REGISTRIES_MY_REGISTRY_INDEX=https://my-intranet:8080/git/index -``` - -> Note: [crates.io] does not accept packages that depend on crates from other -> registries. - -### Publishing to an Alternate Registry - -If the registry supports web API access, then packages can be published -directly to the registry from Cargo. Several of Cargo's commands such as -[`cargo publish`] take a `--registry` command-line flag to indicate which -registry to use. For example, to publish the package in the current directory: - -1. `cargo login --registry=my-registry` - - This only needs to be done once. You must enter the secret API token - retrieved from the registry's website. Alternatively the token may be - passed directly to the `publish` command with the `--token` command-line - flag or an environment variable with the name of the registry such as - `CARGO_REGISTRIES_MY_REGISTRY_TOKEN`. - -2. `cargo publish --registry=my-registry` - -Instead of always passing the `--registry` command-line option, the default -registry may be set in [`.cargo/config.toml`][config] with the `registry.default` -key. - -Setting the `package.publish` key in the `Cargo.toml` manifest restricts which -registries the package is allowed to be published to. This is useful to -prevent accidentally publishing a closed-source package to [crates.io]. The -value may be a list of registry names, for example: - -```toml -[package] -# ... -publish = ["my-registry"] -``` - -The `publish` value may also be `false` to restrict all publishing, which is -the same as an empty list. - -The authentication information saved by [`cargo login`] is stored in the -`credentials.toml` file in the Cargo home directory (default `$HOME/.cargo`). It -has a separate table for each registry, for example: - -```toml -[registries.my-registry] -token = "854DvwSlUwEHtIo3kWy6x7UCPKHfzCmy" -``` - -### Running a Registry - -A minimal registry can be implemented by having a git repository that contains -an index, and a server that contains the compressed `.crate` files created by -[`cargo package`]. Users won't be able to use Cargo to publish to it, but this -may be sufficient for closed environments. - -A full-featured registry that supports publishing will additionally need to -have a web API service that conforms to the API used by Cargo. The web API is -documented below. - -Commercial and community projects are available for building and running a -registry. See -for a list of what is available. - -### Index Format - -The following defines the format of the index. New features are occasionally -added, which are only understood starting with the version of Cargo that -introduced them. Older versions of Cargo may not be able to use packages that -make use of new features. However, the format for older packages should not -change, so older versions of Cargo should be able to use them. 
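Before looking at how the index itself is laid out, here is a sketch of the `registry.default` key mentioned earlier; it reuses the hypothetical `my-registry` name and index URL from the examples above:

```toml
# .cargo/config.toml
[registry]
default = "my-registry"   # used when --registry is not passed on the command line

[registries]
my-registry = { index = "https://my-intranet:8080/git/index" }
```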
- -The index is stored in a git repository so that Cargo can efficiently fetch -incremental updates to the index. In the root of the repository is a file -named `config.json` which contains JSON information used by Cargo for -accessing the registry. This is an example of what the [crates.io] config file -looks like: - -```javascript -{ - "dl": "https://crates.io/api/v1/crates", - "api": "https://crates.io" -} -``` - -The keys are: -- `dl`: This is the URL for downloading crates listed in the index. The value - may have the following markers which will be replaced with their - corresponding value: - - - `{crate}`: The name of crate. - - `{version}`: The crate version. - - `{prefix}`: A directory prefix computed from the crate name. For example, - a crate named `cargo` has a prefix of `ca/rg`. See below for details. - - `{lowerprefix}`: Lowercase variant of `{prefix}`. - - `{sha256-checksum}`: The crate's sha256 checksum. - - If none of the markers are present, then the value - `/{crate}/{version}/download` is appended to the end. -- `api`: This is the base URL for the web API. This key is optional, but if it - is not specified, commands such as [`cargo publish`] will not work. The web - API is described below. - -The download endpoint should send the `.crate` file for the requested package. -Cargo supports https, http, and file URLs, HTTP redirects, HTTP1 and HTTP2. -The exact specifics of TLS support depend on the platform that Cargo is -running on, the version of Cargo, and how it was compiled. - -The rest of the index repository contains one file for each package, where the -filename is the name of the package in lowercase. Each version of the package -has a separate line in the file. The files are organized in a tier of -directories: - -- Packages with 1 character names are placed in a directory named `1`. -- Packages with 2 character names are placed in a directory named `2`. -- Packages with 3 character names are placed in the directory - `3/{first-character}` where `{first-character}` is the first character of - the package name. -- All other packages are stored in directories named - `{first-two}/{second-two}` where the top directory is the first two - characters of the package name, and the next subdirectory is the third and - fourth characters of the package name. For example, `cargo` would be stored - in a file named `ca/rg/cargo`. - -> Note: Although the index filenames are in lowercase, the fields that contain -> package names in `Cargo.toml` and the index JSON data are case-sensitive and -> may contain upper and lower case characters. - -The directory name above is calculated based on the package name converted to -lowercase; it is represented by the marker `{lowerprefix}`. When the original -package name is used without case conversion, the resulting directory name is -represented by the marker `{prefix}`. For example, the package `MyCrate` would -have a `{prefix}` of `My/Cr` and a `{lowerprefix}` of `my/cr`. In general, -using `{prefix}` is recommended over `{lowerprefix}`, but there are pros and -cons to each choice. Using `{prefix}` on case-insensitive filesystems results -in (harmless-but-inelegant) directory aliasing. For example, `crate` and -`CrateTwo` have `{prefix}` values of `cr/at` and `Cr/at`; these are distinct on -Unix machines but alias to the same directory on Windows. Using directories -with normalized case avoids aliasing, but on case-sensitive filesystems it's -harder to support older versions of Cargo that lack `{prefix}`/`{lowerprefix}`. 
-For example, nginx rewrite rules can easily construct `{prefix}` but can't -perform case-conversion to construct `{lowerprefix}`. - -Registries should consider enforcing limitations on package names added to -their index. Cargo itself allows names with any [alphanumeric], `-`, or `_` -characters. [crates.io] imposes its own limitations, including the following: - -- Only allows ASCII characters. -- Only alphanumeric, `-`, and `_` characters. -- First character must be alphabetic. -- Case-insensitive collision detection. -- Prevent differences of `-` vs `_`. -- Under a specific length (max 64). -- Rejects reserved names, such as Windows special filenames like "nul". - -Registries should consider incorporating similar restrictions, and consider -the security implications, such as [IDN homograph -attacks](https://en.wikipedia.org/wiki/IDN_homograph_attack) and other -concerns in [UTR36](https://www.unicode.org/reports/tr36/) and -[UTS39](https://www.unicode.org/reports/tr39/). - -Each line in a package file contains a JSON object that describes a published -version of the package. The following is a pretty-printed example with comments -explaining the format of the entry. - -```javascript -{ - // The name of the package. - // This must only contain alphanumeric, `-`, or `_` characters. - "name": "foo", - // The version of the package this row is describing. - // This must be a valid version number according to the Semantic - // Versioning 2.0.0 spec at https://semver.org/. - "vers": "0.1.0", - // Array of direct dependencies of the package. - "deps": [ - { - // Name of the dependency. - // If the dependency is renamed from the original package name, - // this is the new name. The original package name is stored in - // the `package` field. - "name": "rand", - // The SemVer requirement for this dependency. - // This must be a valid version requirement defined at - // https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html. - "req": "^0.6", - // Array of features (as strings) enabled for this dependency. - "features": ["i128_support"], - // Boolean of whether or not this is an optional dependency. - "optional": false, - // Boolean of whether or not default features are enabled. - "default_features": true, - // The target platform for the dependency. - // null if not a target dependency. - // Otherwise, a string such as "cfg(windows)". - "target": null, - // The dependency kind. - // "dev", "build", or "normal". - // Note: this is a required field, but a small number of entries - // exist in the crates.io index with either a missing or null - // `kind` field due to implementation bugs. - "kind": "normal", - // The URL of the index of the registry where this dependency is - // from as a string. If not specified or null, it is assumed the - // dependency is in the current registry. - "registry": null, - // If the dependency is renamed, this is a string of the actual - // package name. If not specified or null, this dependency is not - // renamed. - "package": null, - } - ], - // A SHA256 checksum of the `.crate` file. - "cksum": "d867001db0e2b6e0496f9fac96930e2d42233ecd3ca0413e0753d4c7695d289c", - // Set of features defined for the package. - // Each feature maps to an array of features or dependencies it enables. - "features": { - "extras": ["rand/simd_support"] - }, - // Boolean of whether or not this version has been yanked. - "yanked": false, - // The `links` string value from the package's manifest, or null if not - // specified. This field is optional and defaults to null. 
- "links": null -} -``` - -The JSON objects should not be modified after they are added except for the -`yanked` field whose value may change at any time. - -### Web API - -A registry may host a web API at the location defined in `config.json` to -support any of the actions listed below. - -Cargo includes the `Authorization` header for requests that require -authentication. The header value is the API token. The server should respond -with a 403 response code if the token is not valid. Users are expected to -visit the registry's website to obtain a token, and Cargo can store the token -using the [`cargo login`] command, or by passing the token on the -command-line. - -Responses use a 200 response code for both success and errors. Cargo looks at -the JSON response to determine if there was success or failure. Failure -responses have a JSON object with the following structure: - -```javascript -{ - // Array of errors to display to the user. - "errors": [ - { - // The error message as a string. - "detail": "error message text" - } - ] -} -``` - -Servers may also respond with a 404 response code to indicate the requested -resource is not found (for example, an unknown crate name). However, using a -200 response with an `errors` object allows a registry to provide a more -detailed error message if desired. - -For backwards compatibility, servers should ignore any unexpected query -parameters or JSON fields. If a JSON field is missing, it should be assumed to -be null. The endpoints are versioned with the `v1` component of the path, and -Cargo is responsible for handling backwards compatibility fallbacks should any -be required in the future. - -Cargo sets the following headers for all requests: - -- `Content-Type`: `application/json` -- `Accept`: `application/json` -- `User-Agent`: The Cargo version such as `cargo 1.32.0 (8610973aa - 2019-01-02)`. This may be modified by the user in a configuration value. - Added in 1.29. - -#### Publish - -- Endpoint: `/api/v1/crates/new` -- Method: PUT -- Authorization: Included - -The publish endpoint is used to publish a new version of a crate. The server -should validate the crate, make it available for download, and add it to the -index. - -The body of the data sent by Cargo is: - -- 32-bit unsigned little-endian integer of the length of JSON data. -- Metadata of the package as a JSON object. -- 32-bit unsigned little-endian integer of the length of the `.crate` file. -- The `.crate` file. - -The following is a commented example of the JSON object. Some notes of some -restrictions imposed by [crates.io] are included only to illustrate some -suggestions on types of validation that may be done, and should not be -considered as an exhaustive list of restrictions [crates.io] imposes. - -```javascript -{ - // The name of the package. - "name": "foo", - // The version of the package being published. - "vers": "0.1.0", - // Array of direct dependencies of the package. - "deps": [ - { - // Name of the dependency. - // If the dependency is renamed from the original package name, - // this is the original name. The new package name is stored in - // the `explicit_name_in_toml` field. - "name": "rand", - // The semver requirement for this dependency. - "version_req": "^0.6", - // Array of features (as strings) enabled for this dependency. - "features": ["i128_support"], - // Boolean of whether or not this is an optional dependency. - "optional": false, - // Boolean of whether or not default features are enabled. 
- "default_features": true, - // The target platform for the dependency. - // null if not a target dependency. - // Otherwise, a string such as "cfg(windows)". - "target": null, - // The dependency kind. - // "dev", "build", or "normal". - "kind": "normal", - // The URL of the index of the registry where this dependency is - // from as a string. If not specified or null, it is assumed the - // dependency is in the current registry. - "registry": null, - // If the dependency is renamed, this is a string of the new - // package name. If not specified or null, this dependency is not - // renamed. - "explicit_name_in_toml": null, - } - ], - // Set of features defined for the package. - // Each feature maps to an array of features or dependencies it enables. - // Cargo does not impose limitations on feature names, but crates.io - // requires alphanumeric ASCII, `_` or `-` characters. - "features": { - "extras": ["rand/simd_support"] - }, - // List of strings of the authors. - // May be empty. - "authors": ["Alice "], - // Description field from the manifest. - // May be null. crates.io requires at least some content. - "description": null, - // String of the URL to the website for this package's documentation. - // May be null. - "documentation": null, - // String of the URL to the website for this package's home page. - // May be null. - "homepage": null, - // String of the content of the README file. - // May be null. - "readme": null, - // String of a relative path to a README file in the crate. - // May be null. - "readme_file": null, - // Array of strings of keywords for the package. - "keywords": [], - // Array of strings of categories for the package. - "categories": [], - // String of the license for the package. - // May be null. crates.io requires either `license` or `license_file` to be set. - "license": null, - // String of a relative path to a license file in the crate. - // May be null. - "license_file": null, - // String of the URL to the website for the source repository of this package. - // May be null. - "repository": null, - // Optional object of "status" badges. Each value is an object of - // arbitrary string to string mappings. - // crates.io has special interpretation of the format of the badges. - "badges": { - "travis-ci": { - "branch": "master", - "repository": "rust-lang/cargo" - } - }, - // The `links` string value from the package's manifest, or null if not - // specified. This field is optional and defaults to null. - "links": null -} -``` - -A successful response includes the JSON object: - -```javascript -{ - // Optional object of warnings to display to the user. - "warnings": { - // Array of strings of categories that are invalid and ignored. - "invalid_categories": [], - // Array of strings of badge names that are invalid and ignored. - "invalid_badges": [], - // Array of strings of arbitrary warnings to display to the user. - "other": [] - } -} -``` - -#### Yank - -- Endpoint: `/api/v1/crates/{crate_name}/{version}/yank` -- Method: DELETE -- Authorization: Included - -The yank endpoint will set the `yank` field of the given version of a crate to -`true` in the index. - -A successful response includes the JSON object: - -```javascript -{ - // Indicates the delete succeeded, always true. - "ok": true, -} -``` - -#### Unyank - -- Endpoint: `/api/v1/crates/{crate_name}/{version}/unyank` -- Method: PUT -- Authorization: Included - -The unyank endpoint will set the `yank` field of the given version of a crate -to `false` in the index. 
- -A successful response includes the JSON object: - -```javascript -{ - // Indicates the delete succeeded, always true. - "ok": true, -} -``` - -#### Owners - -Cargo does not have an inherent notion of users and owners, but it does -provide the `owner` command to assist managing who has authorization to -control a crate. It is up to the registry to decide exactly how users and -owners are handled. See the [publishing documentation] for a description of -how [crates.io] handles owners via GitHub users and teams. - -##### Owners: List - -- Endpoint: `/api/v1/crates/{crate_name}/owners` -- Method: GET -- Authorization: Included - -The owners endpoint returns a list of owners of the crate. - -A successful response includes the JSON object: - -```javascript -{ - // Array of owners of the crate. - "users": [ - { - // Unique unsigned 32-bit integer of the owner. - "id": 70, - // The unique username of the owner. - "login": "github:rust-lang:core", - // Name of the owner. - // This is optional and may be null. - "name": "Core", - } - ] -} -``` - -##### Owners: Add - -- Endpoint: `/api/v1/crates/{crate_name}/owners` -- Method: PUT -- Authorization: Included - -A PUT request will send a request to the registry to add a new owner to a -crate. It is up to the registry how to handle the request. For example, -[crates.io] sends an invite to the user that they must accept before being -added. - -The request should include the following JSON object: - -```javascript -{ - // Array of `login` strings of owners to add. - "users": ["login_name"] -} -``` - -A successful response includes the JSON object: - -```javascript -{ - // Indicates the add succeeded, always true. - "ok": true, - // A string to be displayed to the user. - "msg": "user ehuss has been invited to be an owner of crate cargo" -} -``` - -##### Owners: Remove - -- Endpoint: `/api/v1/crates/{crate_name}/owners` -- Method: DELETE -- Authorization: Included - -A DELETE request will remove an owner from a crate. The request should include -the following JSON object: - -```javascript -{ - // Array of `login` strings of owners to remove. - "users": ["login_name"] -} -``` - -A successful response includes the JSON object: - -```javascript -{ - // Indicates the remove succeeded, always true. - "ok": true -} -``` - -#### Search - -- Endpoint: `/api/v1/crates` -- Method: GET -- Query Parameters: - - `q`: The search query string. - - `per_page`: Number of results, default 10, max 100. - -The search request will perform a search for crates, using criteria defined on -the server. - -A successful response includes the JSON object: - -```javascript -{ - // Array of results. - "crates": [ - { - // Name of the crate. - "name": "rand", - // The highest version available. - "max_version": "0.6.1", - // Textual description of the crate. - "description": "Random number generators and other randomness functionality.\n", - } - ], - "meta": { - // Total number of results available on the server. - "total": 119 - } -} -``` - -#### Login - -- Endpoint: `/me` - -The "login" endpoint is not an actual API request. It exists solely for the -[`cargo login`] command to display a URL to instruct a user to visit in a web -browser to log in and retrieve an API token. 
- -[Source Replacement]: source-replacement.md -[`cargo login`]: ../commands/cargo-login.md -[`cargo package`]: ../commands/cargo-package.md -[`cargo publish`]: ../commands/cargo-publish.md -[alphanumeric]: ../../std/primitive.char.html#method.is_alphanumeric -[config]: config.md -[crates.io]: https://crates.io/ -[publishing documentation]: publishing.md#cargo-owner diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/resolver.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/resolver.md deleted file mode 100644 index d3476d083..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/resolver.md +++ /dev/null @@ -1,526 +0,0 @@ -# Dependency Resolution - -One of Cargo's primary tasks is to determine the versions of dependencies to -use based on the version requirements specified in each package. This process -is called "dependency resolution" and is performed by the "resolver". The -result of the resolution is stored in the `Cargo.lock` file which "locks" the -dependencies to specific versions, and keeps them fixed over time. - -The resolver attempts to unify common dependencies while considering possibly -conflicting requirements. The sections below provide some details on how these -constraints are handled, and how to work with the resolver. - -See the chapter [Specifying Dependencies] for more details about how -dependency requirements are specified. - -The [`cargo tree`] command can be used to visualize the result of the -resolver. - -[Specifying Dependencies]: specifying-dependencies.md -[`cargo tree`]: ../commands/cargo-tree.md - -## SemVer compatibility - -Cargo uses [SemVer] for specifying version numbers. This establishes a common -convention for what is compatible between different versions of a package. See -the [SemVer Compatibility] chapter for guidance on what is considered a -"compatible" change. This notion of "compatibility" is important because Cargo -assumes it should be safe to update a dependency within a compatibility range -without breaking the build. - -Versions are considered compatible if their left-most non-zero -major/minor/patch component is the same. For example, `1.0.3` and `1.1.0` are -considered compatible, and thus it should be safe to update from the older -release to the newer one. However, an update from `1.1.0` to `2.0.0` would not -be allowed to be made automatically. This convention also applies to versions -with leading zeros. For example, `0.1.0` and `0.1.2` are compatible, but -`0.1.0` and `0.2.0` are not. Similarly, `0.0.1` and `0.0.2` are not -compatible. - -As a quick refresher, the *version requirement* syntax Cargo uses for -dependencies is: - -Requirement | Example | Equivalence | Description ---|--------|--|------------- -Caret | `1.2.3` or `^1.2.3` | >=1.2.3, <2.0.0 | Any SemVer-compatible version of at least the given value. -Tilde | `~1.2` | >=1.2.0, <1.3.0 | Minimum version, with restricted compatibility range. -Wildcard | `1.*` | >=1.0.0, <2.0.0 | Any version in the `*` position. -Equals | `=1.2.3` | =1.2.3 | Exactly the specified version only. -Comparison | `>1.1` | >=1.2.0 | Naive numeric comparison of specified digits. -Compound | >=1.2, <1.5 | >1.2.0, <1.5.0 | Multiple requirements that must be simultaneously satisfied. - -When multiple packages specify a dependency for a common package, the resolver -attempts to ensure that they use the same version of that common package, as -long as they are within a SemVer compatibility range. 
It also attempts to use -the greatest version currently available within that compatibility range. For -example, if there are two packages in the resolve graph with the following -requirements: - -```toml -# Package A -[dependencies] -bitflags = "1.0" - -# Package B -[dependencies] -bitflags = "1.1" -``` - -If at the time the `Cargo.lock` file is generated, the greatest version of -`bitflags` is `1.2.1`, then both packages will use `1.2.1` because it is the -greatest within the compatibility range. If `2.0.0` is published, it will -still use `1.2.1` because `2.0.0` is considered incompatible. - -If multiple packages have a common dependency with semver-incompatible -versions, then Cargo will allow this, but will build two separate copies of -the dependency. For example: - -```toml -# Package A -[dependencies] -rand = "0.7" - -# Package B -[dependencies] -rand = "0.6" -``` - -The above will result in Package A using the greatest `0.7` release (`0.7.3` -at the time of this writing) and Package B will use the greatest `0.6` release -(`0.6.5` for example). This can lead to potential problems, see the -[Version-incompatibility hazards] section for more details. - -Multiple versions within the same compatibility range are not allowed and will -result in a resolver error if it is constrained to two different versions -within a compatibility range. For example, if there are two packages in the -resolve graph with the following requirements: - -```toml -# Package A -[dependencies] -log = "=0.4.11" - -# Package B -[dependencies] -log = "=0.4.8" -``` - -The above will fail because it is not allowed to have two separate copies of -the `0.4` release of the `log` package. - -[SemVer]: https://semver.org/ -[SemVer Compatibility]: semver.md -[Version-incompatibility hazards]: #version-incompatibility-hazards - -### Version-incompatibility hazards - -When multiple versions of a crate appear in the resolve graph, this can cause -problems when types from those crates are exposed by the crates using them. -This is because the types and items are considered different by the Rust -compiler, even if they have the same name. Libraries should take care when -publishing a SemVer-incompatible version (for example, publishing `2.0.0` -after `1.0.0` has been in use), particularly for libraries that are widely -used. - -The "[semver trick]" is a workaround for this problem of publishing a breaking -change while retaining compatibility with older versions. The linked page goes -into detail about what the problem is and how to address it. In short, when a -library wants to publish a SemVer-breaking release, publish the new release, -and also publish a point release of the previous version that reexports the -types from the newer version. - -These incompatibilities usually manifest as a compile-time error, but -sometimes they will only appear as a runtime misbehavior. For example, let's -say there is a common library named `foo` that ends up appearing with both -version `1.0.0` and `2.0.0` in the resolve graph. If [`downcast_ref`] is used -on a object created by a library using version `1.0.0`, and the code calling -`downcast_ref` is downcasting to a type from version `2.0.0`, the downcast -will fail at runtime. - -It is important to make sure that if you have multiple versions of a library -that you are properly using them, especially if it is ever possible for the -types from different versions to be used together. 
The [`cargo tree --d`][`cargo tree`] command can be used to identify duplicate versions and -where they come from. Similarly, it is important to consider the impact on the -ecosystem if you publish a SemVer-incompatible version of a popular library. - -[semver trick]: https://github.com/dtolnay/semver-trick -[`downcast_ref`]: ../../std/any/trait.Any.html#method.downcast_ref - -### Pre-releases - -SemVer has the concept of "pre-releases" with a dash in the version, such as -`1.0.0-alpha`, or `1.0.0-beta`. Cargo will avoid automatically using -pre-releases unless explicitly asked. For example, if `1.0.0-alpha` of package -`foo` is published, then a requirement of `foo = "1.0"` will *not* match, and -will return an error. The pre-release must be specified, such as `foo = -"1.0.0-alpha"`. Similarly [`cargo install`] will avoid pre-releases unless -explicitly asked to install one. - -Cargo allows "newer" pre-releases to be used automatically. For example, if -`1.0.0-beta` is published, then a requirement `foo = "1.0.0-alpha"` will allow -updating to the `beta` version. Beware that pre-release versions can be -unstable, and as such care should be taken when using them. Some projects may -choose to publish breaking changes between pre-release versions. It is -recommended to not use pre-release dependencies in a library if your library -is not also a pre-release. Care should also be taken when updating your -`Cargo.lock`, and be prepared if a pre-release update causes issues. - -The pre-release tag may be separated with periods to distinguish separate -components. Numeric components will use numeric comparison. For example, -`1.0.0-alpha.4` will use numeric comparison for the `4` component. That means -that if `1.0.0-alpha.11` is published, that will be chosen as the greatest -release. Non-numeric components are compared lexicographically. - -[`cargo install`]: ../commands/cargo-install.md - -### Version metadata - -SemVer has the concept of "version metadata" with a plus in the version, such -as `1.0.0+21AF26D3`. This metadata is usually ignored, and should not be used -in a version requirement. You should never publish multiple versions that -differ only in the metadata tag (note, this is a [known issue] with -[crates.io] that currently permits this). - -[known issue]: https://github.com/rust-lang/crates.io/issues/1059 -[crates.io]: https://crates.io/ - -## Other constraints - -Version requirements aren't the only constraint that the resolver considers -when selecting and unifying dependencies. The following sections cover some of -the other constraints that can affect resolution. - -### Features - -For the purpose of generating `Cargo.lock`, the resolver builds the dependency -graph as-if all [features] of all [workspace] members are enabled. This -ensures that any optional dependencies are available and properly resolved -with the rest of the graph when features are added or removed with the -[`--features` command-line flag](features.md#command-line-feature-options). -The resolver runs a second time to determine the actual features used when -*compiling* a crate, based on the features selected on the command-line. - -Dependencies are resolved with the union of all features enabled on them. For -example, if one package depends on the [`im`] package with the [`serde` -dependency] enabled and another package depends on it with the [`rayon` -dependency] enabled, then `im` will be built with both features enabled, and -the `serde` and `rayon` crates will be included in the resolve graph. 
If no -packages depend on `im` with those features, then those optional dependencies -will be ignored, and they will not affect resolution. - -When building multiple packages in a workspace (such as with `--workspace` or -multiple `-p` flags), the features of the dependencies of all of those -packages are unified. If you have a circumstance where you want to avoid that -unification for different workspace members, you will need to build them via -separate `cargo` invocations. - -The resolver will skip over versions of packages that are missing required -features. For example, if a package depends on version `^1` of [`regex`] with -the [`perf` feature], then the oldest version it can select is `1.3.0`, -because versions prior to that did not contain the `perf` feature. Similarly, -if a feature is removed from a new release, then packages that require that -feature will be stuck on the older releases that contain that feature. It is -discouraged to remove features in a SemVer-compatible release. Beware that -optional dependencies also define an implicit feature, so removing an optional -dependency or making it non-optional can cause problems, see [removing an -optional dependency]. - -[`im`]: https://crates.io/crates/im -[`perf` feature]: https://github.com/rust-lang/regex/blob/1.3.0/Cargo.toml#L56 -[`rayon` dependency]: https://github.com/bodil/im-rs/blob/v15.0.0/Cargo.toml#L47 -[`regex`]: https://crates.io/crates/regex -[`serde` dependency]: https://github.com/bodil/im-rs/blob/v15.0.0/Cargo.toml#L46 -[features]: features.md -[removing an optional dependency]: semver.md#cargo-remove-opt-dep -[workspace]: workspaces.md - -#### Feature resolver version 2 - -When `resolver = "2"` is specified in `Cargo.toml` (see [resolver -versions](#resolver-versions) below), a different feature resolver is used -which uses a different algorithm for unifying features. The version `"1"` -resolver will unify features for a package no matter where it is specified. -The version `"2"` resolver will avoid unifying features in the following -situations: - -* Features for target-specific dependencies are not enabled if the target is - not currently being built. For example: - - ```toml - [dependency.common] - version = "1.0" - features = ["f1"] - - [target.'cfg(windows)'.dependencies.common] - version = "1.0" - features = ["f2"] - ``` - - When building this example for a non-Windows platform, the `f2` feature will - *not* be enabled. - -* Features enabled on [build-dependencies] or proc-macros will not be unified - when those same dependencies are used as a normal dependency. For example: - - ```toml - [dependencies] - log = "0.4" - - [build-dependencies] - log = {version = "0.4", features=['std']} - ``` - - When building the build script, the `log` crate will be built with the `std` - feature. When building the library of your package, it will not enable the - feature. - -* Features enabled on [dev-dependencies] will not be unified when those same - dependencies are used as a normal dependency, unless those dev-dependencies - are currently being built. For example: - - ```toml - [dependencies] - serde = {version = "1.0", default-features = false} - - [dev-dependencies] - serde = {version = "1.0", features = ["std"]} - ``` - - In this example, the library will normally link against `serde` without the - `std` feature. However, when built as a test or example, it will include the - `std` feature. For example, `cargo test` or `cargo build --all-targets` will - unify these features. 
Note that dev-dependencies in dependencies are always - ignored, this is only relevant for the top-level package or workspace - members. - -[build-dependencies]: specifying-dependencies.md#build-dependencies -[dev-dependencies]: specifying-dependencies.md#development-dependencies -[resolver-field]: features.md#resolver-versions - -### `links` - -The [`links` field] is used to ensure only one copy of a native library is -linked into a binary. The resolver will attempt to find a graph where there is -only one instance of each `links` name. If it is unable to find a graph that -satisfies that constraint, it will return an error. - -For example, it is an error if one package depends on [`libgit2-sys`] version -`0.11` and another depends on `0.12`, because Cargo is unable to unify those, -but they both link to the `git2` native library. Due to this requirement, it -is encouraged to be very careful when making SemVer-incompatible releases with -the `links` field if your library is in common use. - -[`links` field]: manifest.md#the-links-field -[`libgit2-sys`]: https://crates.io/crates/libgit2-sys - -### Yanked versions - -[Yanked releases][yank] are those that are marked that they should not be -used. When the resolver is building the graph, it will ignore all yanked -releases unless they already exist in the `Cargo.lock` file. - -[yank]: publishing.md#cargo-yank - -## Dependency updates - -Dependency resolution is automatically performed by all Cargo commands that -need to know about the dependency graph. For example, [`cargo build`] will run -the resolver to discover all the dependencies to build. After the first time -it runs, the result is stored in the `Cargo.lock` file. Subsequent commands -will run the resolver, keeping dependencies locked to the versions in -`Cargo.lock` *if it can*. - -If the dependency list in `Cargo.toml` has been modified, for example changing -the version of a dependency from `1.0` to `2.0`, then the resolver will select -a new version for that dependency that matches the new requirements. If that -new dependency introduces new requirements, those new requirements may also -trigger additional updates. The `Cargo.lock` file will be updated with the new -result. The `--locked` or `--frozen` flags can be used to change this behavior -to prevent automatic updates when requirements change, and return an error -instead. - -[`cargo update`] can be used to update the entries in `Cargo.lock` when new -versions are published. Without any options, it will attempt to update all -packages in the lock file. The `-p` flag can be used to target the update for -a specific package, and other flags such as `--aggressive` or `--precise` can -be used to control how versions are selected. - -[`cargo build`]: ../commands/cargo-build.md -[`cargo update`]: ../commands/cargo-update.md - -## Overrides - -Cargo has several mechanisms to override dependencies within the graph. The -[Overriding Dependencies] chapter goes into detail on how to use overrides. -The overrides appear as an overlay to a registry, replacing the patched -version with the new entry. Otherwise, resolution is performed like normal. - -[Overriding Dependencies]: overriding-dependencies.md - -## Dependency kinds - -There are three kinds of dependencies in a package: normal, [build], and -[dev][dev-dependencies]. For the most part these are all treated the same from -the perspective of the resolver. One difference is that dev-dependencies for -non-workspace members are always ignored, and do not influence resolution. 
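To make the requirement matching behind these updates concrete, here is a small sketch using the `semver` crate (an assumption for illustration; it implements the same caret/tilde/wildcard grammar described earlier) to check which candidate releases a caret requirement accepts:

```rust
use semver::{Version, VersionReq};

fn main() {
    // A bare `log = "0.4"` in Cargo.toml is the caret requirement `^0.4`.
    let req = VersionReq::parse("^0.4").unwrap();

    for candidate in ["0.4.8", "0.4.11", "0.5.0", "1.0.0"] {
        let version = Version::parse(candidate).unwrap();
        // Only the `0.4.x` releases are candidates the resolver may pick.
        println!("{candidate} satisfies ^0.4: {}", req.matches(&version));
    }
}
```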
- -[Platform-specific dependencies] with the `[target]` table are resolved as-if -all platforms are enabled. In other words, the resolver ignores the platform -or `cfg` expression. - -[build]: specifying-dependencies.md#build-dependencies -[dev-dependencies]: specifying-dependencies.md#development-dependencies -[Platform-specific dependencies]: specifying-dependencies.md#platform-specific-dependencies - -### dev-dependency cycles - -Usually the resolver does not allow cycles in the graph, but it does allow -them for [dev-dependencies]. For example, project "foo" has a dev-dependency -on "bar", which has a normal dependency on "foo" (usually as a "path" -dependency). This is allowed because there isn't really a cycle from the -perspective of the build artifacts. In this example, the "foo" library is -built (which does not need "bar" because "bar" is only used for tests), and -then "bar" can be built depending on "foo", then the "foo" tests can be built -linking to "bar". - -Beware that this can lead to confusing errors. In the case of building library -unit tests, there are actually two copies of the library linked into the final -test binary: the one that was linked with "bar", and the one built that -contains the unit tests. Similar to the issues highlighted in the -[Version-incompatibility hazards] section, the types between the two are not -compatible. Be careful when exposing types of "foo" from "bar" in this -situation, since the "foo" unit tests won't treat them the same as the local -types. - -If possible, try to split your package into multiple packages and restructure -it so that it remains strictly acyclic. - -## Resolver versions - -A different feature resolver algorithm can be used by specifying the resolver -version in `Cargo.toml` like this: - -```toml -[package] -name = "my-package" -version = "1.0.0" -resolver = "2" -``` - -The version `"1"` resolver is the original resolver that shipped with Cargo up to version 1.50. -The default is `"2"` if the root package specifies [`edition = "2021"`](manifest.md#the-edition-field) or a newer edition. -Otherwise the default is `"1"`. - -The version `"2"` resolver introduces changes in [feature -unification](#features). See the [features chapter][features-2] for more -details. - -The resolver is a global option that affects the entire workspace. The -`resolver` version in dependencies is ignored, only the value in the top-level -package will be used. If using a [virtual workspace], the version should be -specified in the `[workspace]` table, for example: - -```toml -[workspace] -members = ["member1", "member2"] -resolver = "2" -``` - -[virtual workspace]: workspaces.md#virtual-manifest -[features-2]: features.md#feature-resolver-version-2 - -## Recommendations - -The following are some recommendations for setting the version within your -package, and for specifying dependency requirements. These are general -guidelines that should apply to common situations, but of course some -situations may require specifying unusual requirements. - -* Follow the [SemVer guidelines] when deciding how to update your version - number, and whether or not you will need to make a SemVer-incompatible - version change. -* Use caret requirements for dependencies, such as `"1.2.3"`, for most - situations. This ensures that the resolver can be maximally flexible in - choosing a version while maintaining build compatibility. - * Specify all three components with the version you are currently using. 
- This helps set the minimum version that will be used, and ensures that - other users won't end up with an older version of the dependency that - might be missing something that your package requires. - * Avoid `*` requirements, as they are not allowed on [crates.io], and they - can pull in SemVer-breaking changes during a normal `cargo update`. - * Avoid overly broad version requirements. For example, `>=2.0.0` can pull - in any SemVer-incompatible version, like version `5.0.0`, which can result - in broken builds in the future. - * Avoid overly narrow version requirements if possible. For example, if you - specify a tilde requirement like `bar="~1.3"`, and another package - specifies a requirement of `bar="1.4"`, this will fail to resolve, even - though minor releases should be compatible. -* Try to keep the dependency versions up-to-date with the actual minimum - versions that your library requires. For example, if you have a requirement - of `bar="1.0.12"`, and then in a future release you start using new features - added in the `1.1.0` release of "bar", update your dependency requirement to - `bar="1.1.0"`. - - If you fail to do this, it may not be immediately obvious because Cargo can - opportunistically choose the newest version when you run a blanket `cargo - update`. However, if another user depends on your library, and runs `cargo - update -p your-library`, it will *not* automatically update "bar" if it is - locked in their `Cargo.lock`. It will only update "bar" in that situation if - the dependency declaration is also updated. Failure to do so can cause - confusing build errors for the user using `cargo update -p`. -* If two packages are tightly coupled, then an `=` dependency requirement may - help ensure that they stay in sync. For example, a library with a companion - proc-macro library will sometimes make assumptions between the two libraries - that won't work well if the two are out of sync (and it is never expected to - use the two libraries independently). The parent library can use an `=` - requirement on the proc-macro, and re-export the macros for easy access. -* `0.0.x` versions can be used for packages that are permanently unstable. - -In general, the stricter you make the dependency requirements, the more likely -it will be for the resolver to fail. Conversely, if you use requirements that -are too loose, it may be possible for new versions to be published that will -break the build. - -[SemVer guidelines]: semver.md - -## Troubleshooting - -The following illustrates some problems you may experience, and some possible -solutions. - -### SemVer-breaking patch release breaks the build - -Sometimes a project may inadvertently publish a point release with a -SemVer-breaking change. When users update with `cargo update`, they will pick -up this new release, and then their build may break. In this situation, it is -recommended that the project should [yank] the release, and either remove the -SemVer-breaking change, or publish it as a new SemVer-major version increase. - -If the change happened in a third-party project, if possible try to -(politely!) work with the project to resolve the issue. - -While waiting for the release to be yanked, some workarounds depend on the -circumstances: - -* If your project is the end product (such as a binary executable), just avoid - updating the offending package in `Cargo.lock`. This can be done with the - `--precise` flag in [`cargo update`]. 
-* If you publish a binary on [crates.io], then you can temporarily add an `=` - requirement to force the dependency to a specific good version. - * Binary projects can alternatively recommend users to use the `--locked` - flag with [`cargo install`] to use the original `Cargo.lock` that contains - the known good version. -* Libraries may also consider publishing a temporary new release with stricter - requirements that avoid the troublesome dependency. You may want to consider - using range requirements (instead of `=`) to avoid overly-strict - requirements that may conflict with other packages using the same - dependency. Once the problem has been resolved, you can publish another - point release that relaxes the dependency back to a caret requirement. -* If it looks like the third-party project is unable or unwilling to yank the - release, then one option is to update your code to be compatible with the - changes, and update the dependency requirement to set the minimum version to - the new release. You will also need to consider if this is a SemVer-breaking - change of your own library, for example if it exposes types from the - dependency. - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/semver.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/semver.md deleted file mode 100644 index 0404efdcf..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/semver.md +++ /dev/null @@ -1,1349 +0,0 @@ -# SemVer Compatibility - -This chapter provides details on what is conventionally considered a -compatible or breaking SemVer change for new releases of a package. See the -[SemVer compatibility] section for details on what SemVer is, and how Cargo -uses it to ensure compatibility of libraries. - -These are only *guidelines*, and not necessarily hard-and-fast rules that all -projects will obey. The [Change categories] section details how this guide -classifies the level and severity of a change. Most of this guide focuses on -changes that will cause `cargo` and `rustc` to fail to build something that -previously worked. Almost every change carries some risk that it will -negatively affect the runtime behavior, and for those cases it is usually a -judgment call by the project maintainers whether or not it is a -SemVer-incompatible change. - -See also [rust-semverver], which is an experimental tool that attempts to -programmatically check compatibility rules. - -[Change categories]: #change-categories -[rust-semverver]: https://github.com/rust-dev-tools/rust-semverver -[SemVer compatibility]: resolver.md#semver-compatibility - -## Change categories - -All of the policies listed below are categorized by the level of change: - -* **Major change**: a change that requires a major SemVer bump. -* **Minor change**: a change that requires only a minor SemVer bump. -* **Possibly-breaking change**: a change that some projects may consider major - and others consider minor. - -The "Possibly-breaking" category covers changes that have the *potential* to -break during an update, but may not necessarily cause a breakage. The impact -of these changes should be considered carefully. The exact nature will depend -on the change and the principles of the project maintainers. - -Some projects may choose to only bump the patch number on a minor change. It -is encouraged to follow the SemVer spec, and only apply bug fixes in patch -releases. 
However, a bug fix may require an API change that is marked as a -"minor change", and shouldn't affect compatibility. This guide does not take a -stance on how each individual "minor change" should be treated, as the -difference between minor and patch changes are conventions that depend on the -nature of the change. - -Some changes are marked as "minor", even though they carry the potential risk -of breaking a build. This is for situations where the potential is extremely -low, and the potentially breaking code is unlikely to be written in idiomatic -Rust, or is specifically discouraged from use. - -This guide uses the terms "major" and "minor" assuming this relates to a -"1.0.0" release or later. Initial development releases starting with "0.y.z" -can treat changes in "y" as a major release, and "z" as a minor release. -"0.0.z" releases are always major changes. This is because Cargo uses the -convention that only changes in the left-most non-zero component are -considered incompatible. - -* API compatibility - * Items - * [Major: renaming/moving/removing any public items](#item-remove) - * [Minor: adding new public items](#item-new) - * Structs - * [Major: adding a private struct field when all current fields are public](#struct-add-private-field-when-public) - * [Major: adding a public field when no private field exists](#struct-add-public-field-when-no-private) - * [Minor: adding or removing private fields when at least one already exists](#struct-private-fields-with-private) - * [Minor: going from a tuple struct with all private fields (with at least one field) to a normal struct, or vice versa](#struct-tuple-normal-with-private) - * Enums - * [Major: adding new enum variants (without `non_exhaustive`)](#enum-variant-new) - * [Major: adding new fields to an enum variant](#enum-fields-new) - * Traits - * [Major: adding a non-defaulted trait item](#trait-new-item-no-default) - * [Major: any change to trait item signatures](#trait-item-signature) - * [Possibly-breaking: adding a defaulted trait item](#trait-new-default-item) - * [Major: adding a trait item that makes the trait non-object safe](#trait-object-safety) - * [Major: adding a type parameter without a default](#trait-new-parameter-no-default) - * [Minor: adding a defaulted trait type parameter](#trait-new-parameter-default) - * Implementations - * [Possibly-breaking change: adding any inherent items](#impl-item-new) - * Generics - * [Major: tightening generic bounds](#generic-bounds-tighten) - * [Minor: loosening generic bounds](#generic-bounds-loosen) - * [Minor: adding defaulted type parameters](#generic-new-default) - * [Minor: generalizing a type to use generics (with identical types)](#generic-generalize-identical) - * [Major: generalizing a type to use generics (with possibly different types)](#generic-generalize-different) - * [Minor: changing a generic type to a more generic type](#generic-more-generic) - * Functions - * [Major: adding/removing function parameters](#fn-change-arity) - * [Possibly-breaking: introducing a new function type parameter](#fn-generic-new) - * [Minor: generalizing a function to use generics (supporting original type)](#fn-generalize-compatible) - * [Major: generalizing a function to use generics with type mismatch](#fn-generalize-mismatch) - * Attributes - * [Major: switching from `no_std` support to requiring `std`](#attr-no-std-to-std) -* Tooling and environment compatibility - * [Possibly-breaking: changing the minimum version of Rust required](#env-new-rust) - * [Possibly-breaking: changing 
the platform and environment requirements](#env-change-requirements) - * Cargo - * [Minor: adding a new Cargo feature](#cargo-feature-add) - * [Major: removing a Cargo feature](#cargo-feature-remove) - * [Major: removing a feature from a feature list if that changes functionality or public items](#cargo-feature-remove-another) - * [Possibly-breaking: removing an optional dependency](#cargo-remove-opt-dep) - * [Minor: changing dependency features](#cargo-change-dep-feature) - * [Minor: adding dependencies](#cargo-dep-add) -* [Application compatibility](#application-compatibility) - -## API compatibility - -All of the examples below contain three parts: the original code, the code -after it has been modified, and an example usage of the code that could appear -in another project. In a minor change, the example usage should successfully -build with both the before and after versions. - - -### Major: renaming/moving/removing any public items - -The absence of a publicly exposed [item][items] will cause any uses of that item to -fail to compile. - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub fn foo() {} - -/////////////////////////////////////////////////////////// -// After -// ... item has been removed - -/////////////////////////////////////////////////////////// -// Example usage that will break. -fn main() { - updated_crate::foo(); // Error: cannot find function `foo` -} -``` - -This includes adding any sort of [`cfg` attribute] which can change which -items or behavior is available based on [conditional compilation]. - -Mitigating strategies: -* Mark items to be removed as [deprecated], and then remove them at a later - date in a SemVer-breaking release. -* Mark renamed items as [deprecated], and use a [`pub use`] item to re-export - to the old name. - - -### Minor: adding new public items - -Adding new, public [items] is a minor change. - -```rust,ignore -// MINOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -// ... absence of item - -/////////////////////////////////////////////////////////// -// After -pub fn foo() {} - -/////////////////////////////////////////////////////////// -// Example use of the library that will safely work. -// `foo` is not used since it didn't previously exist. -``` - -Note that in some rare cases this can be a **breaking change** due to glob -imports. For example, if you add a new trait, and a project has used a glob -import that brings that trait into scope, and the new trait introduces an -associated item that conflicts with any types it is implemented on, this can -cause a compile-time error due to the ambiguity. Example: - -```rust,ignore -// Breaking change example - -/////////////////////////////////////////////////////////// -// Before -// ... absence of trait - -/////////////////////////////////////////////////////////// -// After -pub trait NewTrait { - fn foo(&self) {} -} - -impl NewTrait for i32 {} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -use updated_crate::*; - -pub trait LocalTrait { - fn foo(&self) {} -} - -impl LocalTrait for i32 {} - -fn main() { - 123i32.foo(); // Error: multiple applicable items in scope -} -``` - -This is not considered a major change because conventionally glob imports are -a known forwards-compatibility hazard. Glob imports of items from external -crates should be avoided. 
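The `#[deprecated]` mitigation suggested above for renamed or removed items can be sketched as follows; `old_name` and `new_name` are hypothetical items used purely for illustration:

```rust
// Keep the old name as a thin, deprecated wrapper so existing callers keep
// compiling, then remove it in a later SemVer-breaking release.
pub fn new_name() -> u32 {
    42
}

#[deprecated(since = "1.1.0", note = "renamed to `new_name`")]
pub fn old_name() -> u32 {
    new_name()
}

fn main() {
    // Downstream code still builds, but now gets a deprecation warning
    // pointing at the replacement.
    #[allow(deprecated)]
    let _value = old_name();
}
```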
- - -### Major: adding a private struct field when all current fields are public - -When a private field is added to a struct that previously had all public fields, -this will break any code that attempts to construct it with a [struct literal]. - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub struct Foo { - pub f1: i32, -} - -/////////////////////////////////////////////////////////// -// After -pub struct Foo { - pub f1: i32, - f2: i32, -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -fn main() { - let x = updated_crate::Foo { f1: 123 }; // Error: cannot construct `Foo` -} -``` - -Mitigation strategies: -* Do not add new fields to all-public field structs. -* Mark structs as [`#[non_exhaustive]`][non_exhaustive] when first introducing - a struct to prevent users from using struct literal syntax, and instead - provide a constructor method and/or [Default] implementation. - - -### Major: adding a public field when no private field exists - -When a public field is added to a struct that has all public fields, this will -break any code that attempts to construct it with a [struct literal]. - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub struct Foo { - pub f1: i32, -} - -/////////////////////////////////////////////////////////// -// After -pub struct Foo { - pub f1: i32, - pub f2: i32, -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -fn main() { - let x = updated_crate::Foo { f1: 123 }; // Error: missing field `f2` -} -``` - -Mitigation strategies: -* Do not add new new fields to all-public field structs. -* Mark structs as [`#[non_exhaustive]`][non_exhaustive] when first introducing - a struct to prevent users from using struct literal syntax, and instead - provide a constructor method and/or [Default] implementation. - - -### Minor: adding or removing private fields when at least one already exists - -It is safe to add or remove private fields from a struct when the struct -already has at least one private field. - -```rust,ignore -// MINOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -#[derive(Default)] -pub struct Foo { - f1: i32, -} - -/////////////////////////////////////////////////////////// -// After -#[derive(Default)] -pub struct Foo { - f2: f64, -} - -/////////////////////////////////////////////////////////// -// Example use of the library that will safely work. -fn main() { - // Cannot access private fields. - let x = updated_crate::Foo::default(); -} -``` - -This is safe because existing code cannot use a [struct literal] to construct -it, nor exhaustively match its contents. - -Note that for tuple structs, this is a **major change** if the tuple contains -public fields, and the addition or removal of a private field changes the -index of any public field. - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -#[derive(Default)] -pub struct Foo(pub i32, i32); - -/////////////////////////////////////////////////////////// -// After -#[derive(Default)] -pub struct Foo(f64, pub i32, i32); - -/////////////////////////////////////////////////////////// -// Example usage that will break. 
-fn main() { - let x = updated_crate::Foo::default(); - let y = x.0; // Error: is private -} -``` - - -### Minor: going from a tuple struct with all private fields (with at least one field) to a normal struct, or vice versa - -Changing a tuple struct to a normal struct (or vice-versa) is safe if all -fields are private. - -```rust,ignore -// MINOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -#[derive(Default)] -pub struct Foo(i32); - -/////////////////////////////////////////////////////////// -// After -#[derive(Default)] -pub struct Foo { - f1: i32, -} - -/////////////////////////////////////////////////////////// -// Example use of the library that will safely work. -fn main() { - // Cannot access private fields. - let x = updated_crate::Foo::default(); -} -``` - -This is safe because existing code cannot use a [struct literal] to construct -it, nor match its contents. - - -### Major: adding new enum variants (without `non_exhaustive`) - -It is a breaking change to add a new enum variant if the enum does not use the -[`#[non_exhaustive]`][non_exhaustive] attribute. - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub enum E { - Variant1, -} - -/////////////////////////////////////////////////////////// -// After -pub enum E { - Variant1, - Variant2, -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -fn main() { - use updated_crate::E; - let x = E::Variant1; - match x { // Error: `Variant2` not covered - E::Variant1 => {} - } -} -``` - -Mitigation strategies: -* When introducing the enum, mark it as [`#[non_exhaustive]`][non_exhaustive] - to force users to use [wildcard patterns] to catch new variants. - - -### Major: adding new fields to an enum variant - -It is a breaking change to add new fields to an enum variant because all -fields are public, and constructors and matching will fail to compile. - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub enum E { - Variant1 { f1: i32 }, -} - -/////////////////////////////////////////////////////////// -// After -pub enum E { - Variant1 { f1: i32, f2: i32 }, -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -fn main() { - use updated_crate::E; - let x = E::Variant1 { f1: 1 }; // Error: missing f2 - match x { - E::Variant1 { f1 } => {} // Error: missing f2 - } -} -``` - -Mitigation strategies: -* When introducing the enum, mark the variant as [`non_exhaustive`][non_exhaustive] - so that it cannot be constructed or matched without wildcards. - ```rust,ignore,skip - pub enum E { - #[non_exhaustive] - Variant1{f1: i32} - } - ``` -* When introducing the enum, use an explicit struct as a value, where you can - have control over the field visibility. - ```rust,ignore,skip - pub struct Foo { - f1: i32, - f2: i32, - } - pub enum E { - Variant1(Foo) - } - ``` - - -### Major: adding a non-defaulted trait item - -It is a breaking change to add a non-defaulted item to a trait. This will -break any implementors of the trait. - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub trait Trait {} - -/////////////////////////////////////////////////////////// -// After -pub trait Trait { - fn foo(&self); -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. 
-use updated_crate::Trait; -struct Foo; - -impl Trait for Foo {} // Error: not all trait items implemented -``` - -Mitigation strategies: -* Always provide a default implementation or value for new associated trait - items. -* When introducing the trait, use the [sealed trait] technique to prevent - users outside of the crate from implementing the trait. - - -### Major: any change to trait item signatures - -It is a breaking change to make any change to a trait item signature. This can -break external implementors of the trait. - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub trait Trait { - fn f(&self, x: i32) {} -} - -/////////////////////////////////////////////////////////// -// After -pub trait Trait { - // For sealed traits or normal functions, this would be a minor change - // because generalizing with generics strictly expands the possible uses. - // But in this case, trait implementations must use the same signature. - fn f(&self, x: V) {} -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -use updated_crate::Trait; -struct Foo; - -impl Trait for Foo { - fn f(&self, x: i32) {} // Error: trait declaration has 1 type parameter -} -``` - -Mitigation strategies: -* Introduce new items with default implementations to cover the new - functionality instead of modifying existing items. -* When introducing the trait, use the [sealed trait] technique to prevent - users outside of the crate from implementing the trait. - - -### Possibly-breaking: adding a defaulted trait item - -It is usually safe to add a defaulted trait item. However, this can sometimes -cause a compile error. For example, this can introduce an ambiguity if a -method of the same name exists in another trait. - -```rust,ignore -// Breaking change example - -/////////////////////////////////////////////////////////// -// Before -pub trait Trait {} - -/////////////////////////////////////////////////////////// -// After -pub trait Trait { - fn foo(&self) {} -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -use updated_crate::Trait; -struct Foo; - -trait LocalTrait { - fn foo(&self) {} -} - -impl Trait for Foo {} -impl LocalTrait for Foo {} - -fn main() { - let x = Foo; - x.foo(); // Error: multiple applicable items in scope -} -``` - -Note that this ambiguity does *not* exist for name collisions on [inherent -implementations], as they take priority over trait items. - -See [trait-object-safety](#trait-object-safety) for a special case to consider -when adding trait items. - -Mitigation strategies: -* Some projects may deem this acceptable breakage, particularly if the new - item name is unlikely to collide with any existing code. Choose names - carefully to help avoid these collisions. Additionally, it may be acceptable - to require downstream users to add [disambiguation syntax] to select the - correct function when updating the dependency. - - -### Major: adding a trait item that makes the trait non-object safe - -It is a breaking change to add a trait item that changes the trait to not be -[object safe]. - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub trait Trait {} - -/////////////////////////////////////////////////////////// -// After -pub trait Trait { - // An associated const makes the trait not object-safe. 
- const CONST: i32 = 123; -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -use updated_crate::Trait; -struct Foo; - -impl Trait for Foo {} - -fn main() { - let obj: Box = Box::new(Foo); // Error: cannot be made into an object -} -``` - -It is safe to do the converse (making a non-object safe trait into a safe -one). - - -### Major: adding a type parameter without a default - -It is a breaking change to add a type parameter without a default to a trait. - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub trait Trait {} - -/////////////////////////////////////////////////////////// -// After -pub trait Trait {} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -use updated_crate::Trait; -struct Foo; - -impl Trait for Foo {} // Error: missing generics -``` - -Mitigating strategies: -* See [adding a defaulted trait type parameter](#trait-new-parameter-default). - - -### Minor: adding a defaulted trait type parameter - -It is safe to add a type parameter to a trait as long as it has a default. -External implementors will use the default without needing to specify the -parameter. - -```rust,ignore -// MINOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub trait Trait {} - -/////////////////////////////////////////////////////////// -// After -pub trait Trait {} - -/////////////////////////////////////////////////////////// -// Example use of the library that will safely work. -use updated_crate::Trait; -struct Foo; - -impl Trait for Foo {} -``` - - -### Possibly-breaking change: adding any inherent items - -Usually adding inherent items to an implementation should be safe because -inherent items take priority over trait items. However, in some cases the -collision can cause problems if the name is the same as an implemented trait -item with a different signature. - -```rust,ignore -// Breaking change example - -/////////////////////////////////////////////////////////// -// Before -pub struct Foo; - -/////////////////////////////////////////////////////////// -// After -pub struct Foo; - -impl Foo { - pub fn foo(&self) {} -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -use updated_crate::Foo; - -trait Trait { - fn foo(&self, x: i32) {} -} - -impl Trait for Foo {} - -fn main() { - let x = Foo; - x.foo(1); // Error: this function takes 0 arguments -} -``` - -Note that if the signatures match, there would not be a compile-time error, -but possibly a silent change in runtime behavior (because it is now executing -a different function). - -Mitigation strategies: -* Some projects may deem this acceptable breakage, particularly if the new - item name is unlikely to collide with any existing code. Choose names - carefully to help avoid these collisions. Additionally, it may be acceptable - to require downstream users to add [disambiguation syntax] to select the - correct function when updating the dependency. - - -### Major: tightening generic bounds - -It is a breaking change to tighten generic bounds on a type since this can -break users expecting the looser bounds. 
-
-```rust,ignore
-// MAJOR CHANGE
-
-///////////////////////////////////////////////////////////
-// Before
-pub struct Foo<A> {
-    pub f1: A,
-}
-
-///////////////////////////////////////////////////////////
-// After
-pub struct Foo<A: Eq> {
-    pub f1: A,
-}
-
-///////////////////////////////////////////////////////////
-// Example usage that will break.
-use updated_crate::Foo;
-
-fn main() {
-    let s = Foo { f1: 1.23 }; // Error: the trait bound `{float}: Eq` is not satisfied
-}
-```
-
-### Minor: loosening generic bounds
-
-It is safe to loosen the generic bounds on a type, as it only expands what is
-allowed.
-
-```rust,ignore
-// MINOR CHANGE
-
-///////////////////////////////////////////////////////////
-// Before
-pub struct Foo<A: Clone> {
-    pub f1: A,
-}
-
-///////////////////////////////////////////////////////////
-// After
-pub struct Foo<A> {
-    pub f1: A,
-}
-
-///////////////////////////////////////////////////////////
-// Example use of the library that will safely work.
-use updated_crate::Foo;
-
-fn main() {
-    let s = Foo { f1: 123 };
-}
-```
-
-### Minor: adding defaulted type parameters
-
-It is safe to add a type parameter to a type as long as it has a default. All
-existing references will use the default without needing to specify the
-parameter.
-
-```rust,ignore
-// MINOR CHANGE
-
-///////////////////////////////////////////////////////////
-// Before
-#[derive(Default)]
-pub struct Foo {}
-
-///////////////////////////////////////////////////////////
-// After
-#[derive(Default)]
-pub struct Foo<A = i32> {
-    f1: A,
-}
-
-///////////////////////////////////////////////////////////
-// Example use of the library that will safely work.
-use updated_crate::Foo;
-
-fn main() {
-    let s: Foo = Default::default();
-}
-```
-
-### Minor: generalizing a type to use generics (with identical types)
-
-A struct or enum field can change from a concrete type to a generic type
-parameter, provided that the change results in an identical type for all
-existing use cases. For example, the following change is permitted:
-
-```rust,ignore
-// MINOR CHANGE
-
-///////////////////////////////////////////////////////////
-// Before
-pub struct Foo(pub u8);
-
-///////////////////////////////////////////////////////////
-// After
-pub struct Foo<T = u8>(pub T);
-
-///////////////////////////////////////////////////////////
-// Example use of the library that will safely work.
-use updated_crate::Foo;
-
-fn main() {
-    let s: Foo = Foo(123);
-}
-```
-
-because existing uses of `Foo` are shorthand for `Foo<u8>` which yields the
-identical field type.
-
-### Major: generalizing a type to use generics (with possibly different types)
-
-Changing a struct or enum field from a concrete type to a generic type
-parameter can break if the type can change.
-
-```rust,ignore
-// MAJOR CHANGE
-
-///////////////////////////////////////////////////////////
-// Before
-pub struct Foo<T = u8>(pub T, pub u8);
-
-///////////////////////////////////////////////////////////
-// After
-pub struct Foo<T = u8>(pub T, pub T);
-
-///////////////////////////////////////////////////////////
-// Example usage that will break.
-use updated_crate::Foo;
-
-fn main() {
-    let s: Foo<f32> = Foo(3.14, 123); // Error: mismatched types
-}
-```
-
-### Minor: changing a generic type to a more generic type
-
-It is safe to change a generic type to a more generic one.
-For example, the following adds a generic parameter that defaults to the
-original type, which is safe because all existing users will be using the
-same type for both fields, and the defaulted parameter does not need to be
-specified.
-
-```rust,ignore
-// MINOR CHANGE
-
-///////////////////////////////////////////////////////////
-// Before
-pub struct Foo<T>(pub T, pub T);
-
-///////////////////////////////////////////////////////////
-// After
-pub struct Foo<T, U = T>(pub T, pub U);
-
-///////////////////////////////////////////////////////////
-// Example use of the library that will safely work.
-use updated_crate::Foo;
-
-fn main() {
-    let s: Foo<f64> = Foo(1.0, 2.0);
-}
-```
-
-### Major: adding/removing function parameters
-
-Changing the arity of a function is a breaking change.
-
-```rust,ignore
-// MAJOR CHANGE
-
-///////////////////////////////////////////////////////////
-// Before
-pub fn foo() {}
-
-///////////////////////////////////////////////////////////
-// After
-pub fn foo(x: i32) {}
-
-///////////////////////////////////////////////////////////
-// Example usage that will break.
-fn main() {
-    updated_crate::foo(); // Error: this function takes 1 argument
-}
-```
-
-Mitigating strategies:
-* Introduce a new function with the new signature and possibly
-  [deprecate][deprecated] the old one.
-* Introduce functions that take a struct argument, where the struct is built
-  with the builder pattern. This allows new fields to be added to the struct
-  in the future.
-
-### Possibly-breaking: introducing a new function type parameter
-
-Usually, adding a non-defaulted type parameter is safe, but in some
-cases it can be a breaking change:
-
-```rust,ignore
-// Breaking change example
-
-///////////////////////////////////////////////////////////
-// Before
-pub fn foo<T>() {}
-
-///////////////////////////////////////////////////////////
-// After
-pub fn foo<T, U>() {}
-
-///////////////////////////////////////////////////////////
-// Example usage that will break.
-use updated_crate::foo;
-
-fn main() {
-    foo::<u8>(); // Error: this function takes 2 generic arguments but 1 generic argument was supplied
-}
-```
-
-However, such explicit calls are rare enough (and can usually be written in
-other ways) that this breakage is usually acceptable. One should take into
-account how likely it is that the function in question is being called with
-explicit type arguments.
-
-### Minor: generalizing a function to use generics (supporting original type)
-
-The type of a parameter to a function, or its return value, can be
-*generalized* to use generics, including by introducing a new type parameter,
-as long as it can be instantiated to the original type. For example, the
-following changes are allowed:
-
-```rust,ignore
-// MINOR CHANGE
-
-///////////////////////////////////////////////////////////
-// Before
-pub fn foo(x: u8) -> u8 {
-    x
-}
-pub fn bar<T: Iterator<Item = u8>>(t: T) {}
-
-///////////////////////////////////////////////////////////
-// After
-use std::ops::Add;
-pub fn foo<T: Add>(x: T) -> T {
-    x
-}
-pub fn bar<T: IntoIterator<Item = u8>>(t: T) {}
-
-///////////////////////////////////////////////////////////
-// Example use of the library that will safely work.
-use updated_crate::{bar, foo};
-
-fn main() {
-    foo(1);
-    bar(vec![1, 2, 3].into_iter());
-}
-```
-
-because all existing uses are instantiations of the new signature.
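The same rule, condensed into one runnable program (hypothetical functions, not an API from any crate): call sites written against the original concrete signatures keep compiling after the generalization.

```rust
use std::ops::Add;

// Generalized from `fn foo(x: u8) -> u8`; `u8` still satisfies the new bound.
fn foo<T: Add<Output = T>>(x: T) -> T {
    x
}

// Generalized from `T: Iterator<Item = u8>` to the looser `IntoIterator`.
fn bar<T: IntoIterator<Item = u8>>(t: T) {
    for _byte in t {}
}

fn main() {
    // Call sites written against the original concrete signatures still work.
    let y: u8 = foo(1u8);
    assert_eq!(y, 1);
    bar(vec![1u8, 2, 3].into_iter());

    // And the generalized versions accept more types than before.
    assert_eq!(foo(2.5f64), 2.5);
    bar(vec![4u8, 5]);
}
```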
- -Perhaps somewhat surprisingly, generalization applies to trait objects as -well, given that every trait implements itself: - -```rust,ignore -// MINOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub trait Trait {} -pub fn foo(t: &dyn Trait) {} - -/////////////////////////////////////////////////////////// -// After -pub trait Trait {} -pub fn foo(t: &T) {} - -/////////////////////////////////////////////////////////// -// Example use of the library that will safely work. -use updated_crate::{foo, Trait}; - -struct Foo; -impl Trait for Foo {} - -fn main() { - let obj = Foo; - foo(&obj); -} -``` - -(The use of `?Sized` is essential; otherwise you couldn't recover the original -signature.) - -Introducing generics in this way can potentially create type inference -failures. These are usually rare, and may be acceptable breakage for some -projects, as this can be fixed with additional type annotations. - -```rust,ignore -// Breaking change example - -/////////////////////////////////////////////////////////// -// Before -pub fn foo() -> i32 { - 0 -} - -/////////////////////////////////////////////////////////// -// After -pub fn foo() -> T { - Default::default() -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -use updated_crate::foo; - -fn main() { - let x = foo(); // Error: type annotations needed -} -``` - - -### Major: generalizing a function to use generics with type mismatch - -It is a breaking change to change a function parameter or return type if the -generic type constrains or changes the types previously allowed. For example, -the following adds a generic constraint that may not be satisfied by existing -code: - -```rust,ignore -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -pub fn foo(x: Vec) {} - -/////////////////////////////////////////////////////////// -// After -pub fn foo>(x: T) {} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -use updated_crate::foo; - -fn main() { - foo(vec![1, 2, 3]); // Error: `Copy` is not implemented for `Vec` -} -``` - - -### Major: switching from `no_std` support to requiring `std` - -If your library specifically supports a [`no_std`] environment, it is a -breaking change to make a new release that requires `std`. - -```rust,ignore,skip -// MAJOR CHANGE - -/////////////////////////////////////////////////////////// -// Before -#![no_std] -pub fn foo() {} - -/////////////////////////////////////////////////////////// -// After -pub fn foo() { - std::time::SystemTime::now(); -} - -/////////////////////////////////////////////////////////// -// Example usage that will break. -// This will fail to link for no_std targets because they don't have a `std` crate. -#![no_std] -use updated_crate::foo; - -fn example() { - foo(); -} -``` - -Mitigation strategies: -* A common idiom to avoid this is to include a `std` [Cargo feature] that - optionally enables `std` support, and when the feature is off, the library - can be used in a `no_std` environment. - -## Tooling and environment compatibility - - -### Possibly-breaking: changing the minimum version of Rust required - -Introducing the use of new features in a new release of Rust can break -projects that are using older versions of Rust. This also includes using new -features in a new release of Cargo, and requiring the use of a nightly-only -feature in a crate that previously worked on stable. 
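For example, the sketch below uses `let ... else`, which only compiles on Rust 1.65 or newer; the syntax is chosen purely to illustrate adopting a feature that older toolchains reject, not because this guide requires it.

```rust
// Illustration only: `let ... else` requires Rust 1.65 or newer, so adopting
// it in a library raises that library's minimum supported Rust version even
// though the library's API is unchanged.
fn parse_or_zero(input: &str) -> i32 {
    let Ok(n) = input.parse::<i32>() else {
        return 0;
    };
    n
}

fn main() {
    assert_eq!(parse_or_zero("42"), 42);
    assert_eq!(parse_or_zero("not a number"), 0);
}
```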
- -Some projects choose to allow this in a minor release for various reasons. It -is usually relatively easy to update to a newer version of Rust. Rust also has -a rapid 6-week release cycle, and some projects will provide compatibility -within a window of releases (such as the current stable release plus N -previous releases). Just keep in mind that some large projects may not be able -to update their Rust toolchain rapidly. - -Mitigation strategies: -* Use [Cargo features] to make the new features opt-in. -* Provide a large window of support for older releases. -* Copy the source of new standard library items if possible so that you - can continue to use an older version but take advantage of the new feature. -* Provide a separate branch of older minor releases that can receive backports - of important bugfixes. -* Keep an eye out for the [`[cfg(version(..))]`][cfg-version] and - [`#[cfg(accessible(..))]`][cfg-accessible] features which provide an opt-in - mechanism for new features. These are currently unstable and only available - in the nightly channel. - - -### Possibly-breaking: changing the platform and environment requirements - -There is a very wide range of assumptions a library makes about the -environment that it runs in, such as the host platform, operating system -version, available services, filesystem support, etc. It can be a breaking -change if you make a new release that restricts what was previously supported, -for example requiring a newer version of an operating system. These changes -can be difficult to track, since you may not always know if a change breaks in -an environment that is not automatically tested. - -Some projects may deem this acceptable breakage, particularly if the breakage -is unlikely for most users, or the project doesn't have the resources to -support all environments. Another notable situation is when a vendor -discontinues support for some hardware or OS, the project may deem it -reasonable to also discontinue support. - -Mitigation strategies: -* Document the platforms and environments you specifically support. -* Test your code on a wide range of environments in CI. - -### Cargo - - -#### Minor: adding a new Cargo feature - -It is usually safe to add new [Cargo features]. If the feature introduces new -changes that cause a breaking change, this can cause difficulties for projects -that have stricter backwards-compatibility needs. In that scenario, avoid -adding the feature to the "default" list, and possibly document the -consequences of enabling the feature. - -```toml -# MINOR CHANGE - -########################################################### -# Before -[features] -# ..empty - -########################################################### -# After -[features] -std = [] -``` - - -#### Major: removing a Cargo feature - -It is usually a breaking change to remove [Cargo features]. This will cause -an error for any project that enabled the feature. - -```toml -# MAJOR CHANGE - -########################################################### -# Before -[features] -logging = [] - -########################################################### -# After -[dependencies] -# ..logging removed -``` - -Mitigation strategies: -* Clearly document your features. If there is an internal or experimental - feature, mark it as such, so that users know the status of the feature. -* Leave the old feature in `Cargo.toml`, but otherwise remove its - functionality. Document that the feature is deprecated, and remove it in a - future major SemVer release. 
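In code, an additive `std` feature like the one above is commonly wired up with `cfg` gates, which also ties back to the earlier `no_std` section. The sketch below is one conventional pattern, not something this guide mandates; the feature name and items are illustrative.

```rust
// Crate root (lib.rs) of a hypothetical library with an additive `std` feature:
// the crate stays `no_std` by default and only compiles `std`-dependent items
// when the feature is enabled.
#![cfg_attr(not(feature = "std"), no_std)]

/// Available with or without the `std` feature.
pub fn add(a: u32, b: u32) -> u32 {
    a + b
}

/// Compiled only when the `std` Cargo feature is enabled. Adding this gated
/// item is a minor change; removing the `std` feature later would be major.
#[cfg(feature = "std")]
pub fn timestamp() -> std::time::SystemTime {
    std::time::SystemTime::now()
}
```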
- - -#### Major: removing a feature from a feature list if that changes functionality or public items - -If removing a feature from another feature, this can break existing users if -they are expecting that functionality to be available through that feature. - -```toml -# Breaking change example - -########################################################### -# Before -[features] -default = ["std"] -std = [] - -########################################################### -# After -[features] -default = [] # This may cause packages to fail if they are expecting std to be enabled. -std = [] -``` - - -#### Possibly-breaking: removing an optional dependency - -Removing an optional dependency can break a project using your library because -another project may be enabling that dependency via [Cargo features]. - -```toml -# Breaking change example - -########################################################### -# Before -[dependencies] -curl = { version = "0.4.31", optional = true } - -########################################################### -# After -[dependencies] -# ..curl removed -``` - -Mitigation strategies: -* Clearly document your features. If the optional dependency is not included - in the documented list of features, then you may decide to consider it safe - to change undocumented entries. -* Leave the optional dependency, and just don't use it within your library. -* Replace the optional dependency with a [Cargo feature] that does nothing, - and document that it is deprecated. -* Use high-level features which enable optional dependencies, and document - those as the preferred way to enable the extended functionality. For - example, if your library has optional support for something like - "networking", create a generic feature name "networking" that enables the - optional dependencies necessary to implement "networking". Then document the - "networking" feature. - - -#### Minor: changing dependency features - -It is usually safe to change the features on a dependency, as long as the -feature does not introduce a breaking change. - -```toml -# MINOR CHANGE - -########################################################### -# Before -[dependencies] -rand = { version = "0.7.3", features = ["small_rng"] } - - -########################################################### -# After -[dependencies] -rand = "0.7.3" -``` - - -#### Minor: adding dependencies - -It is usually safe to add new dependencies, as long as the new dependency -does not introduce new requirements that result in a breaking change. -For example, adding a new dependency that requires nightly in a project -that previously worked on stable is a major change. - -```toml -# MINOR CHANGE - -########################################################### -# Before -[dependencies] -# ..empty - -########################################################### -# After -[dependencies] -log = "0.4.11" -``` - -## Application compatibility - -Cargo projects may also include executable binaries which have their own -interfaces (such as a CLI interface, OS-level interaction, etc.). Since these -are part of the Cargo package, they often use and share the same version as -the package. You will need to decide if and how you want to employ a SemVer -contract with your users in the changes you make to your application. 
The -potential breaking and compatible changes to an application are too numerous -to list, so you are encouraged to use the spirit of the [SemVer] spec to guide -your decisions on how to apply versioning to your application, or at least -document what your commitments are. - -[`cfg` attribute]: ../../reference/conditional-compilation.md#the-cfg-attribute -[`no_std`]: ../../reference/names/preludes.html#the-no_std-attribute -[`pub use`]: ../../reference/items/use-declarations.html -[Cargo feature]: features.md -[Cargo features]: features.md -[cfg-accessible]: https://github.com/rust-lang/rust/issues/64797 -[cfg-version]: https://github.com/rust-lang/rust/issues/64796 -[conditional compilation]: ../../reference/conditional-compilation.md -[Default]: ../../std/default/trait.Default.html -[deprecated]: ../../reference/attributes/diagnostics.html#the-deprecated-attribute -[disambiguation syntax]: ../../reference/expressions/call-expr.html#disambiguating-function-calls -[inherent implementations]: ../../reference/items/implementations.html#inherent-implementations -[items]: ../../reference/items.html -[non_exhaustive]: ../../reference/attributes/type_system.html#the-non_exhaustive-attribute -[object safe]: ../../reference/items/traits.html#object-safety -[rust-feature]: https://doc.rust-lang.org/nightly/unstable-book/ -[sealed trait]: https://rust-lang.github.io/api-guidelines/future-proofing.html#sealed-traits-protect-against-downstream-implementations-c-sealed -[SemVer]: https://semver.org/ -[struct literal]: ../../reference/expressions/struct-expr.html -[wildcard patterns]: ../../reference/patterns.html#wildcard-pattern diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/source-replacement.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/source-replacement.md deleted file mode 100644 index 3ee3e7f0e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/source-replacement.md +++ /dev/null @@ -1,122 +0,0 @@ -## Source Replacement - -This document is about replacing the crate index. You can read about overriding -dependencies in the [overriding dependencies] section of this -documentation. - -A *source* is a provider that contains crates that may be included as -dependencies for a package. Cargo supports the ability to **replace one source -with another** to express strategies such as: - -* Vendoring - custom sources can be defined which represent crates on the local - filesystem. These sources are subsets of the source that they're replacing and - can be checked into packages if necessary. - -* Mirroring - sources can be replaced with an equivalent version which acts as a - cache for crates.io itself. - -Cargo has a core assumption about source replacement that the source code is -exactly the same from both sources. Note that this also means that -a replacement source is not allowed to have crates which are not present in the -original source. - -As a consequence, source replacement is not appropriate for situations such as -patching a dependency or a private registry. Cargo supports patching -dependencies through the usage of [the `[patch]` key][overriding -dependencies], and private registry support is described in [the Registries -chapter][registries]. 
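As a contrast with source replacement, a minimal sketch of patching a single dependency with the `[patch]` table (the repository URL and branch are illustrative):

```toml
[patch.crates-io]
# Build against a locally patched copy of `foo` while keeping the
# crates.io index as the source for everything else.
foo = { git = "https://github.com/example/foo", branch = "bugfix" }
```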
- -[overriding dependencies]: overriding-dependencies.md -[registries]: registries.md - -### Configuration - -Configuration of replacement sources is done through [`.cargo/config.toml`][config] -and the full set of available keys are: - -```toml -# The `source` table is where all keys related to source-replacement -# are stored. -[source] - -# Under the `source` table are a number of other tables whose keys are a -# name for the relevant source. For example this section defines a new -# source, called `my-vendor-source`, which comes from a directory -# located at `vendor` relative to the directory containing this `.cargo/config.toml` -# file -[source.my-vendor-source] -directory = "vendor" - -# The crates.io default source for crates is available under the name -# "crates-io", and here we use the `replace-with` key to indicate that it's -# replaced with our source above. -[source.crates-io] -replace-with = "my-vendor-source" - -# Each source has its own table where the key is the name of the source -[source.the-source-name] - -# Indicate that `the-source-name` will be replaced with `another-source`, -# defined elsewhere -replace-with = "another-source" - -# Several kinds of sources can be specified (described in more detail below): -registry = "https://example.com/path/to/index" -local-registry = "path/to/registry" -directory = "path/to/vendor" - -# Git sources can optionally specify a branch/tag/rev as well -git = "https://example.com/path/to/repo" -# branch = "master" -# tag = "v1.0.1" -# rev = "313f44e8" -``` - -[config]: config.md - -### Registry Sources - -A "registry source" is one that is the same as crates.io itself. That is, it has -an index served in a git repository which matches the format of the -[crates.io index](https://github.com/rust-lang/crates.io-index). That repository -then has configuration indicating where to download crates from. - -Currently there is not an already-available project for setting up a mirror of -crates.io. Stay tuned though! - -### Local Registry Sources - -A "local registry source" is intended to be a subset of another registry -source, but available on the local filesystem (aka vendoring). Local registries -are downloaded ahead of time, typically sync'd with a `Cargo.lock`, and are -made up of a set of `*.crate` files and an index like the normal registry is. - -The primary way to manage and create local registry sources is through the -[`cargo-local-registry`][cargo-local-registry] subcommand, -[available on crates.io][cargo-local-registry] and can be installed with -`cargo install cargo-local-registry`. - -[cargo-local-registry]: https://crates.io/crates/cargo-local-registry - -Local registries are contained within one directory and contain a number of -`*.crate` files downloaded from crates.io as well as an `index` directory with -the same format as the crates.io-index project (populated with just entries for -the crates that are present). - -### Directory Sources - -A "directory source" is similar to a local registry source where it contains a -number of crates available on the local filesystem, suitable for vendoring -dependencies. Directory sources are primarily managed by the `cargo vendor` -subcommand. - -Directory sources are distinct from local registries though in that they contain -the unpacked version of `*.crate` files, making it more suitable in some -situations to check everything into source control. 
A directory source is just a -directory containing a number of other directories which contain the source code -for crates (the unpacked version of `*.crate` files). Currently no restriction -is placed on the name of each directory. - -Each crate in a directory source also has an associated metadata file indicating -the checksum of each file in the crate to protect against accidental -modifications. diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/specifying-dependencies.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/specifying-dependencies.md deleted file mode 100644 index bfdadc7ef..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/specifying-dependencies.md +++ /dev/null @@ -1,477 +0,0 @@ -## Specifying Dependencies - -Your crates can depend on other libraries from [crates.io] or other -registries, `git` repositories, or subdirectories on your local file system. -You can also temporarily override the location of a dependency โ€” for example, -to be able to test out a bug fix in the dependency that you are working on -locally. You can have different dependencies for different platforms, and -dependencies that are only used during development. Let's take a look at how -to do each of these. - -### Specifying dependencies from crates.io - -Cargo is configured to look for dependencies on [crates.io] by default. Only -the name and a version string are required in this case. In [the cargo -guide](../guide/index.md), we specified a dependency on the `time` crate: - -```toml -[dependencies] -time = "0.1.12" -``` - -The string `"0.1.12"` is a version requirement. Although it looks like a -specific *version* of the `time` crate, it actually specifies a *range* of -versions and allows [SemVer] compatible updates. An update is allowed if the new -version number does not modify the left-most non-zero digit in the major, minor, -patch grouping. In this case, if we ran `cargo update -p time`, cargo should -update us to version `0.1.13` if it is the latest `0.1.z` release, but would not -update us to `0.2.0`. If instead we had specified the version string as `1.0`, -cargo should update to `1.1` if it is the latest `1.y` release, but not `2.0`. -The version `0.0.x` is not considered compatible with any other version. - -[SemVer]: https://semver.org - -Here are some more examples of version requirements and the versions that would -be allowed with them: - -```notrust -1.2.3 := >=1.2.3, <2.0.0 -1.2 := >=1.2.0, <2.0.0 -1 := >=1.0.0, <2.0.0 -0.2.3 := >=0.2.3, <0.3.0 -0.2 := >=0.2.0, <0.3.0 -0.0.3 := >=0.0.3, <0.0.4 -0.0 := >=0.0.0, <0.1.0 -0 := >=0.0.0, <1.0.0 -``` - -This compatibility convention is different from SemVer in the way it treats -versions before 1.0.0. While SemVer says there is no compatibility before -1.0.0, Cargo considers `0.x.y` to be compatible with `0.x.z`, where `y โ‰ฅ z` -and `x > 0`. - -It is possible to further tweak the logic for selecting compatible versions -using special operators, though it shouldn't be necessary most of the time. - -### Caret requirements - -**Caret requirements** are an alternative syntax for the default strategy, -`^1.2.3` is exactly equivalent to `1.2.3`. - -### Tilde requirements - -**Tilde requirements** specify a minimal version with some ability to update. -If you specify a major, minor, and patch version or only a major and minor -version, only patch-level changes are allowed. If you only specify a major -version, then minor- and patch-level changes are allowed. 
- -`~1.2.3` is an example of a tilde requirement. - -```notrust -~1.2.3 := >=1.2.3, <1.3.0 -~1.2 := >=1.2.0, <1.3.0 -~1 := >=1.0.0, <2.0.0 -``` - -### Wildcard requirements - -**Wildcard requirements** allow for any version where the wildcard is -positioned. - -`*`, `1.*` and `1.2.*` are examples of wildcard requirements. - -```notrust -* := >=0.0.0 -1.* := >=1.0.0, <2.0.0 -1.2.* := >=1.2.0, <1.3.0 -``` - -> **Note**: [crates.io] does not allow bare `*` versions. - -### Comparison requirements - -**Comparison requirements** allow manually specifying a version range or an -exact version to depend on. - -Here are some examples of comparison requirements: - -```notrust ->= 1.2.0 -> 1 -< 2 -= 1.2.3 -``` - -### Multiple requirements - -As shown in the examples above, multiple version requirements can be -separated with a comma, e.g., `>= 1.2, < 1.5`. - -### Specifying dependencies from other registries - -To specify a dependency from a registry other than [crates.io], first the -registry must be configured in a `.cargo/config.toml` file. See the [registries -documentation] for more information. In the dependency, set the `registry` key -to the name of the registry to use. - -```toml -[dependencies] -some-crate = { version = "1.0", registry = "my-registry" } -``` - -> **Note**: [crates.io] does not allow packages to be published with -> dependencies on other registries. - -[registries documentation]: registries.md - -### Specifying dependencies from `git` repositories - -To depend on a library located in a `git` repository, the minimum information -you need to specify is the location of the repository with the `git` key: - -```toml -[dependencies] -regex = { git = "https://github.com/rust-lang/regex" } -``` - -Cargo will fetch the `git` repository at this location then look for a -`Cargo.toml` for the requested crate anywhere inside the `git` repository -(not necessarily at the root - for example, specifying a member crate name -of a workspace and setting `git` to the repository containing the workspace). - -Since we havenโ€™t specified any other information, Cargo assumes that -we intend to use the latest commit on the main branch to build our package. -You can combine the `git` key with the `rev`, `tag`, or `branch` keys to -specify something else. Here's an example of specifying that you want to use -the latest commit on a branch named `next`: - -```toml -[dependencies] -regex = { git = "https://github.com/rust-lang/regex", branch = "next" } -``` - -Anything that is not a branch or tag falls under `rev`. This can be a commit -hash like `rev = "4c59b707"`, or a named reference exposed by the remote -repository such as `rev = "refs/pull/493/head"`. What references are available -varies by where the repo is hosted; GitHub in particular exposes a reference to -the most recent commit of every pull request as shown, but other git hosts often -provide something equivalent, possibly under a different naming scheme. - -Once a `git` dependency has been added, Cargo will lock that dependency to the -latest commit at the time. New commits will not be pulled down automatically -once the lock is in place. However, they can be pulled down manually with -`cargo update`. - -See [Git Authentication] for help with git authentication for private repos. - -> **Note**: [crates.io] does not allow packages to be published with `git` -> dependencies (`git` [dev-dependencies] are ignored). See the [Multiple -> locations](#multiple-locations) section for a fallback alternative. 
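For reference, the `rev` forms discussed above look like this in `Cargo.toml` (using the commit hash and pull-request reference from the text):

```toml
[dependencies]
# Pin to an exact commit hash...
regex = { git = "https://github.com/rust-lang/regex", rev = "4c59b707" }

# ...or pin to a named reference, such as the head of a GitHub pull request:
# regex = { git = "https://github.com/rust-lang/regex", rev = "refs/pull/493/head" }
```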
- -[Git Authentication]: ../appendix/git-authentication.md - -### Specifying path dependencies - -Over time, our `hello_world` package from [the guide](../guide/index.md) has -grown significantly in size! Itโ€™s gotten to the point that we probably want to -split out a separate crate for others to use. To do this Cargo supports **path -dependencies** which are typically sub-crates that live within one repository. -Letโ€™s start off by making a new crate inside of our `hello_world` package: - -```console -# inside of hello_world/ -$ cargo new hello_utils -``` - -This will create a new folder `hello_utils` inside of which a `Cargo.toml` and -`src` folder are ready to be configured. In order to tell Cargo about this, open -up `hello_world/Cargo.toml` and add `hello_utils` to your dependencies: - -```toml -[dependencies] -hello_utils = { path = "hello_utils" } -``` - -This tells Cargo that we depend on a crate called `hello_utils` which is found -in the `hello_utils` folder (relative to the `Cargo.toml` itโ€™s written in). - -And thatโ€™s it! The next `cargo build` will automatically build `hello_utils` and -all of its own dependencies, and others can also start using the crate as well. -However, crates that use dependencies specified with only a path are not -permitted on [crates.io]. If we wanted to publish our `hello_world` crate, we -would need to publish a version of `hello_utils` to [crates.io] -and specify its version in the dependencies line as well: - -```toml -[dependencies] -hello_utils = { path = "hello_utils", version = "0.1.0" } -``` - -> **Note**: [crates.io] does not allow packages to be published with `path` -> dependencies (`path` [dev-dependencies] are ignored). See the [Multiple -> locations](#multiple-locations) section for a fallback alternative. - -### Multiple locations - -It is possible to specify both a registry version and a `git` or `path` -location. The `git` or `path` dependency will be used locally (in which case -the `version` is checked against the local copy), and when published to a -registry like [crates.io], it will use the registry version. Other -combinations are not allowed. Examples: - -```toml -[dependencies] -# Uses `my-bitflags` when used locally, and uses -# version 1.0 from crates.io when published. -bitflags = { path = "my-bitflags", version = "1.0" } - -# Uses the given git repo when used locally, and uses -# version 1.0 from crates.io when published. -smallvec = { git = "https://github.com/servo/rust-smallvec", version = "1.0" } - -# N.B. that if a version doesn't match, Cargo will fail to compile! -``` - -One example where this can be useful is when you have split up a library into -multiple packages within the same workspace. You can then use `path` -dependencies to point to the local packages within the workspace to use the -local version during development, and then use the [crates.io] version once it -is published. This is similar to specifying an -[override](overriding-dependencies.md), but only applies to this one -dependency declaration. - -### Platform specific dependencies - -Platform-specific dependencies take the same format, but are listed under a -`target` section. 
Normally Rust-like [`#[cfg]` -syntax](../../reference/conditional-compilation.html) will be used to define -these sections: - -```toml -[target.'cfg(windows)'.dependencies] -winhttp = "0.4.0" - -[target.'cfg(unix)'.dependencies] -openssl = "1.0.1" - -[target.'cfg(target_arch = "x86")'.dependencies] -native = { path = "native/i686" } - -[target.'cfg(target_arch = "x86_64")'.dependencies] -native = { path = "native/x86_64" } -``` - -Like with Rust, the syntax here supports the `not`, `any`, and `all` operators -to combine various cfg name/value pairs. - -If you want to know which cfg targets are available on your platform, run -`rustc --print=cfg` from the command line. If you want to know which `cfg` -targets are available for another platform, such as 64-bit Windows, -run `rustc --print=cfg --target=x86_64-pc-windows-msvc`. - -Unlike in your Rust source code, you cannot use -`[target.'cfg(feature = "fancy-feature")'.dependencies]` to add dependencies -based on optional features. Use [the `[features]` section](features.md) -instead: - -```toml -[dependencies] -foo = { version = "1.0", optional = true } -bar = { version = "1.0", optional = true } - -[features] -fancy-feature = ["foo", "bar"] -``` - -The same applies to `cfg(debug_assertions)`, `cfg(test)` and `cfg(proc_macro)`. -These values will not work as expected and will always have the default value -returned by `rustc --print=cfg`. -There is currently no way to add dependencies based on these configuration values. - -In addition to `#[cfg]` syntax, Cargo also supports listing out the full target -the dependencies would apply to: - -```toml -[target.x86_64-pc-windows-gnu.dependencies] -winhttp = "0.4.0" - -[target.i686-unknown-linux-gnu.dependencies] -openssl = "1.0.1" -``` - -#### Custom target specifications - -If youโ€™re using a custom target specification (such as `--target -foo/bar.json`), use the base filename without the `.json` extension: - -```toml -[target.bar.dependencies] -winhttp = "0.4.0" - -[target.my-special-i686-platform.dependencies] -openssl = "1.0.1" -native = { path = "native/i686" } -``` - -> **Note**: Custom target specifications are not usable on the stable channel. - -### Development dependencies - -You can add a `[dev-dependencies]` section to your `Cargo.toml` whose format -is equivalent to `[dependencies]`: - -```toml -[dev-dependencies] -tempdir = "0.3" -``` - -Dev-dependencies are not used when compiling -a package for building, but are used for compiling tests, examples, and -benchmarks. - -These dependencies are *not* propagated to other packages which depend on this -package. - -You can also have target-specific development dependencies by using -`dev-dependencies` in the target section header instead of `dependencies`. For -example: - -```toml -[target.'cfg(unix)'.dev-dependencies] -mio = "0.0.1" -``` - -> **Note**: When a package is published, only dev-dependencies that specify a -> `version` will be included in the published crate. For most use cases, -> dev-dependencies are not needed when published, though some users (like OS -> packagers) may want to run tests within a crate, so providing a `version` if -> possible can still be beneficial. - -### Build dependencies - -You can depend on other Cargo-based crates for use in your build scripts. 
-Dependencies are declared through the `build-dependencies` section of the -manifest: - -```toml -[build-dependencies] -cc = "1.0.3" -``` - - -You can also have target-specific build dependencies by using -`build-dependencies` in the target section header instead of `dependencies`. For -example: - -```toml -[target.'cfg(unix)'.build-dependencies] -cc = "1.0.3" -``` - -In this case, the dependency will only be built when the host platform matches the -specified target. - -The build script **does not** have access to the dependencies listed -in the `dependencies` or `dev-dependencies` section. Build -dependencies will likewise not be available to the package itself -unless listed under the `dependencies` section as well. A package -itself and its build script are built separately, so their -dependencies need not coincide. Cargo is kept simpler and cleaner by -using independent dependencies for independent purposes. - -### Choosing features - -If a package you depend on offers conditional features, you can -specify which to use: - -```toml -[dependencies.awesome] -version = "1.3.5" -default-features = false # do not include the default features, and optionally - # cherry-pick individual features -features = ["secure-password", "civet"] -``` - -More information about features can be found in the [features -chapter](features.md#dependency-features). - -### Renaming dependencies in `Cargo.toml` - -When writing a `[dependencies]` section in `Cargo.toml` the key you write for a -dependency typically matches up to the name of the crate you import from in the -code. For some projects, though, you may wish to reference the crate with a -different name in the code regardless of how it's published on crates.io. For -example you may wish to: - -* Avoid the need to `use foo as bar` in Rust source. -* Depend on multiple versions of a crate. -* Depend on crates with the same name from different registries. - -To support this Cargo supports a `package` key in the `[dependencies]` section -of which package should be depended on: - -```toml -[package] -name = "mypackage" -version = "0.0.1" - -[dependencies] -foo = "0.1" -bar = { git = "https://github.com/example/project", package = "foo" } -baz = { version = "0.1", registry = "custom", package = "foo" } -``` - -In this example, three crates are now available in your Rust code: - -```rust,ignore -extern crate foo; // crates.io -extern crate bar; // git repository -extern crate baz; // registry `custom` -``` - -All three of these crates have the package name of `foo` in their own -`Cargo.toml`, so we're explicitly using the `package` key to inform Cargo that -we want the `foo` package even though we're calling it something else locally. -The `package` key, if not specified, defaults to the name of the dependency -being requested. - -Note that if you have an optional dependency like: - -```toml -[dependencies] -bar = { version = "0.1", package = 'foo', optional = true } -``` - -you're depending on the crate `foo` from crates.io, but your crate has a `bar` -feature instead of a `foo` feature. That is, names of features take after the -name of the dependency, not the package name, when renamed. - -Enabling transitive dependencies works similarly, for example we could add the -following to the above manifest: - -```toml -[features] -log-debug = ['bar/log-debug'] # using 'foo/log-debug' would be an error! 
-``` - -[crates.io]: https://crates.io/ -[dev-dependencies]: #development-dependencies - - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/unstable.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/unstable.md deleted file mode 100644 index ff29e8f2b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/unstable.md +++ /dev/null @@ -1,1377 +0,0 @@ -## Unstable Features - -Experimental Cargo features are only available on the [nightly channel]. You -are encouraged to experiment with these features to see if they meet your -needs, and if there are any issues or problems. Check the linked tracking -issues listed below for more information on the feature, and click the GitHub -subscribe button if you want future updates. - -After some period of time, if the feature does not have any major concerns, it -can be [stabilized], which will make it available on stable once the current -nightly release reaches the stable channel (anywhere from 6 to 12 weeks). - -There are three different ways that unstable features can be enabled based on -how the feature works: - -* New syntax in `Cargo.toml` requires a `cargo-features` key at the top of - `Cargo.toml`, before any tables. For example: - - ```toml - # This specifies which new Cargo.toml features are enabled. - cargo-features = ["test-dummy-unstable"] - - [package] - name = "my-package" - version = "0.1.0" - im-a-teapot = true # This is a new option enabled by test-dummy-unstable. - ``` - -* New command-line flags, options, and subcommands require the `-Z - unstable-options` CLI option to also be included. For example, the new - `--out-dir` option is only available on nightly: - - ```cargo +nightly build --out-dir=out -Z unstable-options``` - -* `-Z` command-line flags are used to enable new functionality that may not - have an interface, or the interface has not yet been designed, or for more - complex features that affect multiple parts of Cargo. For example, the - [timings](#timings) feature can be enabled with: - - ```cargo +nightly build -Z timings``` - - Run `cargo -Z help` to see a list of flags available. - - Anything which can be configured with a `-Z` flag can also be set in the - cargo [config file] (`.cargo/config.toml`) in the `unstable` table. For - example: - - ```toml - [unstable] - mtime-on-use = true - multitarget = true - timings = ["html"] - ``` - -Each new feature described below should explain how to use it. - -[config file]: config.md -[nightly channel]: ../../book/appendix-07-nightly-rust.html -[stabilized]: https://doc.crates.io/contrib/process/unstable.html#stabilization - -### List of unstable features - -* Unstable-specific features - * [-Z allow-features](#allow-features) โ€” Provides a way to restrict which unstable features are used. -* Build scripts and linking - * [Metabuild](#metabuild) โ€” Provides declarative build scripts. -* Resolver and features - * [no-index-update](#no-index-update) โ€” Prevents cargo from updating the index cache. - * [avoid-dev-deps](#avoid-dev-deps) โ€” Prevents the resolver from including dev-dependencies during resolution. - * [minimal-versions](#minimal-versions) โ€” Forces the resolver to use the lowest compatible version instead of the highest. - * [public-dependency](#public-dependency) โ€” Allows dependencies to be classified as either public or private. 
- * [Namespaced features](#namespaced-features) โ€” Separates optional dependencies into a separate namespace from regular features, and allows feature names to be the same as some dependency name. - * [Weak dependency features](#weak-dependency-features) โ€” Allows setting features for dependencies without enabling optional dependencies. -* Output behavior - * [out-dir](#out-dir) โ€” Adds a directory where artifacts are copied to. - * [terminal-width](#terminal-width) โ€” Tells rustc the width of the terminal so that long diagnostic messages can be truncated to be more readable. - * [Different binary name](#different-binary-name) โ€” Assign a name to the built binary that is seperate from the crate name. -* Compile behavior - * [mtime-on-use](#mtime-on-use) โ€” Updates the last-modified timestamp on every dependency every time it is used, to provide a mechanism to delete unused artifacts. - * [doctest-xcompile](#doctest-xcompile) โ€” Supports running doctests with the `--target` flag. - * [multitarget](#multitarget) โ€” Supports building for multiple targets at the same time. - * [build-std](#build-std) โ€” Builds the standard library instead of using pre-built binaries. - * [build-std-features](#build-std-features) โ€” Sets features to use with the standard library. - * [binary-dep-depinfo](#binary-dep-depinfo) โ€” Causes the dep-info file to track binary dependencies. - * [panic-abort-tests](#panic-abort-tests) โ€” Allows running tests with the "abort" panic strategy. - * [crate-type](#crate-type) - Supports passing crate types to the compiler. -* rustdoc - * [`doctest-in-workspace`](#doctest-in-workspace) โ€” Fixes workspace-relative paths when running doctests. - * [rustdoc-map](#rustdoc-map) โ€” Provides mappings for documentation to link to external sites like [docs.rs](https://docs.rs/). -* `Cargo.toml` extensions - * [Profile `strip` option](#profile-strip-option) โ€” Forces the removal of debug information and symbols from executables. - * [per-package-target](#per-package-target) โ€” Sets the `--target` to use for each individual package. -* Information and metadata - * [Build-plan](#build-plan) โ€” Emits JSON information on which commands will be run. - * [timings](#timings) โ€” Generates a report on how long individual dependencies took to run. - * [unit-graph](#unit-graph) โ€” Emits JSON for Cargo's internal graph structure. - * [`cargo rustc --print`](#rustc---print) โ€” Calls rustc with `--print` to display information from rustc. -* Configuration - * [config-cli](#config-cli) โ€” Adds the ability to pass configuration options on the command-line. - * [config-include](#config-include) โ€” Adds the ability for config files to include other files. - * [`cargo config`](#cargo-config) โ€” Adds a new subcommand for viewing config files. -* Registries - * [credential-process](#credential-process) โ€” Adds support for fetching registry tokens from an external authentication program. - * [`cargo logout`](#cargo-logout) โ€” Adds the `logout` command to remove the currently saved registry token. - -### allow-features - -This permanently-unstable flag makes it so that only a listed set of -unstable features can be used. Specifically, if you pass -`-Zallow-features=foo,bar`, you'll continue to be able to pass `-Zfoo` -and `-Zbar` to `cargo`, but you will be unable to pass `-Zbaz`. You can -pass an empty string (`-Zallow-features=`) to disallow all unstable -features. 
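For instance, a sketch that restricts a nightly invocation to just two unstable flags (feature names chosen for illustration):

```console
# Only -Ztimings and -Zmultitarget may be used; any other -Z flag is rejected.
$ cargo +nightly -Zallow-features=timings,multitarget build -Z timings
```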
- -`-Zallow-features` also restricts which unstable features can be passed -to the `cargo-features` entry in `Cargo.toml`. If, for example, you want -to allow - -```toml -cargo-features = ["test-dummy-unstable"] -``` - -where `test-dummy-unstable` is unstable, that features would also be -disallowed by `-Zallow-features=`, and allowed with -`-Zallow-features=test-dummy-unstable`. - -The list of features passed to cargo's `-Zallow-features` is also passed -to any Rust tools that cargo ends up calling (like `rustc` or -`rustdoc`). Thus, if you run `cargo -Zallow-features=`, no unstable -Cargo _or_ Rust features can be used. - -### no-index-update -* Original Issue: [#3479](https://github.com/rust-lang/cargo/issues/3479) -* Tracking Issue: [#7404](https://github.com/rust-lang/cargo/issues/7404) - -The `-Z no-index-update` flag ensures that Cargo does not attempt to update -the registry index. This is intended for tools such as Crater that issue many -Cargo commands, and you want to avoid the network latency for updating the -index each time. - -### mtime-on-use -* Original Issue: [#6477](https://github.com/rust-lang/cargo/pull/6477) -* Cache usage meta tracking issue: [#7150](https://github.com/rust-lang/cargo/issues/7150) - -The `-Z mtime-on-use` flag is an experiment to have Cargo update the mtime of -used files to make it easier for tools like cargo-sweep to detect which files -are stale. For many workflows this needs to be set on *all* invocations of cargo. -To make this more practical setting the `unstable.mtime_on_use` flag in `.cargo/config.toml` -or the corresponding ENV variable will apply the `-Z mtime-on-use` to all -invocations of nightly cargo. (the config flag is ignored by stable) - -### avoid-dev-deps -* Original Issue: [#4988](https://github.com/rust-lang/cargo/issues/4988) -* Tracking Issue: [#5133](https://github.com/rust-lang/cargo/issues/5133) - -When running commands such as `cargo install` or `cargo build`, Cargo -currently requires dev-dependencies to be downloaded, even if they are not -used. The `-Z avoid-dev-deps` flag allows Cargo to avoid downloading -dev-dependencies if they are not needed. The `Cargo.lock` file will not be -generated if dev-dependencies are skipped. - -### minimal-versions -* Original Issue: [#4100](https://github.com/rust-lang/cargo/issues/4100) -* Tracking Issue: [#5657](https://github.com/rust-lang/cargo/issues/5657) - -> Note: It is not recommended to use this feature. Because it enforces minimal -> versions for all transitive dependencies, its usefulness is limited since -> not all external dependencies declare proper lower version bounds. It is -> intended that it will be changed in the future to only enforce minimal -> versions for direct dependencies. - -When a `Cargo.lock` file is generated, the `-Z minimal-versions` flag will -resolve the dependencies to the minimum SemVer version that will satisfy the -requirements (instead of the greatest version). - -The intended use-case of this flag is to check, during continuous integration, -that the versions specified in Cargo.toml are a correct reflection of the -minimum versions that you are actually using. That is, if Cargo.toml says -`foo = "1.0.0"` that you don't accidentally depend on features added only in -`foo 1.5.0`. 
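One common shape of this continuous-integration check, sketched with a nightly toolchain: re-resolve the lockfile to the minimum versions the requirements allow, then run the test suite against it.

```console
# Regenerate Cargo.lock using the lowest versions that satisfy Cargo.toml.
$ cargo +nightly update -Z minimal-versions
# Verify the crate still builds and passes tests with those versions.
$ cargo test
```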
- -### out-dir -* Original Issue: [#4875](https://github.com/rust-lang/cargo/issues/4875) -* Tracking Issue: [#6790](https://github.com/rust-lang/cargo/issues/6790) - -This feature allows you to specify the directory where artifacts will be -copied to after they are built. Typically artifacts are only written to the -`target/release` or `target/debug` directories. However, determining the -exact filename can be tricky since you need to parse JSON output. The -`--out-dir` flag makes it easier to predictably access the artifacts. Note -that the artifacts are copied, so the originals are still in the `target` -directory. Example: - -```sh -cargo +nightly build --out-dir=out -Z unstable-options -``` - -This can also be specified in `.cargo/config.toml` files. - -```toml -[build] -out-dir = "out" -``` - -### doctest-xcompile -* Tracking Issue: [#7040](https://github.com/rust-lang/cargo/issues/7040) -* Tracking Rustc Issue: [#64245](https://github.com/rust-lang/rust/issues/64245) - -This flag changes `cargo test`'s behavior when handling doctests when -a target is passed. Currently, if a target is passed that is different -from the host cargo will simply skip testing doctests. If this flag is -present, cargo will continue as normal, passing the tests to doctest, -while also passing it a `--target` option, as well as enabling -`-Zunstable-features --enable-per-target-ignores` and passing along -information from `.cargo/config.toml`. See the rustc issue for more information. - -```sh -cargo test --target foo -Zdoctest-xcompile -``` - -### multitarget -* Tracking Issue: [#8176](https://github.com/rust-lang/cargo/issues/8176) - -This flag allows passing multiple `--target` flags to the `cargo` subcommand -selected. When multiple `--target` flags are passed the selected build targets -will be built for each of the selected architectures. - -For example to compile a library for both 32 and 64-bit: - -``` -cargo build --target x86_64-unknown-linux-gnu --target i686-unknown-linux-gnu -``` - -or running tests for both targets: - -``` -cargo test --target x86_64-unknown-linux-gnu --target i686-unknown-linux-gnu -``` - - -#### New `dir-name` attribute - -Some of the paths generated under `target/` have resulted in a de-facto "build -protocol", where `cargo` is invoked as a part of a larger project build. So, to -preserve the existing behavior, there is also a new attribute `dir-name`, which -when left unspecified, defaults to the name of the profile. For example: - -```toml -[profile.release-lto] -inherits = "release" -dir-name = "lto" # Emits to target/lto instead of target/release-lto -lto = true -``` - - -### Namespaced features -* Original issue: [#1286](https://github.com/rust-lang/cargo/issues/1286) -* Tracking Issue: [#5565](https://github.com/rust-lang/cargo/issues/5565) - -The `namespaced-features` option makes two changes to how features can be -specified: - -* Features may now be defined with the same name as a dependency. -* Optional dependencies can be explicitly enabled in the `[features]` table - with the `dep:` prefix, which enables the dependency without enabling a - feature of the same name. - -By default, an optional dependency `foo` will define a feature `foo = -["dep:foo"]` *unless* `dep:foo` is mentioned in any other feature, or the -`foo` feature is already defined. This helps prevent unnecessary boilerplate -of listing every optional dependency, but still allows you to override the -implicit feature. 
- -This allows two use cases that were previously not possible: - -* You can "hide" an optional dependency, so that external users cannot - explicitly enable that optional dependency. -* There is no longer a need to create "funky" feature names to work around the - restriction that features cannot shadow dependency names. - -To enable namespaced-features, use the `-Z namespaced-features` command-line -flag. - -An example of hiding an optional dependency: - -```toml -[dependencies] -regex = { version = "1.4.1", optional = true } -lazy_static = { version = "1.4.0", optional = true } - -[features] -regex = ["dep:regex", "dep:lazy_static"] -``` - -In this example, the "regex" feature enables both `regex` and `lazy_static`. -The `lazy_static` feature does not exist, and a user cannot explicitly enable -it. This helps hide internal details of how your package is implemented. - -An example of avoiding "funky" names: - -```toml -[dependencies] -bigdecimal = "0.1" -chrono = "0.4" -num-bigint = "0.2" -serde = {version = "1.0", optional = true } - -[features] -serde = ["dep:serde", "bigdecimal/serde", "chrono/serde", "num-bigint/serde"] -``` - -In this case, `serde` is a natural name to use for a feature, because it is -relevant to your exported API. However, previously you would need to use a -name like `serde1` to work around the naming limitation if you wanted to also -enable other features. - -### Build-plan -* Tracking Issue: [#5579](https://github.com/rust-lang/cargo/issues/5579) - -The `--build-plan` argument for the `build` command will output JSON with -information about which commands would be run without actually executing -anything. This can be useful when integrating with another build tool. -Example: - -```sh -cargo +nightly build --build-plan -Z unstable-options -``` - -### Metabuild -* Tracking Issue: [rust-lang/rust#49803](https://github.com/rust-lang/rust/issues/49803) -* RFC: [#2196](https://github.com/rust-lang/rfcs/blob/master/text/2196-metabuild.md) - -Metabuild is a feature to have declarative build scripts. Instead of writing -a `build.rs` script, you specify a list of build dependencies in the -`metabuild` key in `Cargo.toml`. A build script is automatically generated -that runs each build dependency in order. Metabuild packages can then read -metadata from `Cargo.toml` to specify their behavior. - -Include `cargo-features` at the top of `Cargo.toml`, a `metabuild` key in the -`package`, list the dependencies in `build-dependencies`, and add any metadata -that the metabuild packages require under `package.metadata`. Example: - -```toml -cargo-features = ["metabuild"] - -[package] -name = "mypackage" -version = "0.0.1" -metabuild = ["foo", "bar"] - -[build-dependencies] -foo = "1.0" -bar = "1.0" - -[package.metadata.foo] -extra-info = "qwerty" -``` - -Metabuild packages should have a public function called `metabuild` that -performs the same actions as a regular `build.rs` script would perform. - -### public-dependency -* Tracking Issue: [#44663](https://github.com/rust-lang/rust/issues/44663) - -The 'public-dependency' feature allows marking dependencies as 'public' -or 'private'. When this feature is enabled, additional information is passed to rustc to allow -the 'exported_private_dependencies' lint to function properly. 
- -This requires the appropriate key to be set in `cargo-features`: - -```toml -cargo-features = ["public-dependency"] - -[dependencies] -my_dep = { version = "1.2.3", public = true } -private_dep = "2.0.0" # Will be 'private' by default -``` - -### build-std -* Tracking Repository: - -The `build-std` feature enables Cargo to compile the standard library itself as -part of a crate graph compilation. This feature has also historically been known -as "std-aware Cargo". This feature is still in very early stages of development, -and is also a possible massive feature addition to Cargo. This is a very large -feature to document, even in the minimal form that it exists in today, so if -you're curious to stay up to date you'll want to follow the [tracking -repository](https://github.com/rust-lang/wg-cargo-std-aware) and its set of -issues. - -The functionality implemented today is behind a flag called `-Z build-std`. This -flag indicates that Cargo should compile the standard library from source code -using the same profile as the main build itself. Note that for this to work you -need to have the source code for the standard library available, and at this -time the only supported method of doing so is to add the `rust-src` rust rustup -component: - -```console -$ rustup component add rust-src --toolchain nightly -``` - -It is also required today that the `-Z build-std` flag is combined with the -`--target` flag. Note that you're not forced to do a cross compilation, you're -just forced to pass `--target` in one form or another. - -Usage looks like: - -```console -$ cargo new foo -$ cd foo -$ cargo +nightly run -Z build-std --target x86_64-unknown-linux-gnu - Compiling core v0.0.0 (...) - ... - Compiling foo v0.1.0 (...) - Finished dev [unoptimized + debuginfo] target(s) in 21.00s - Running `target/x86_64-unknown-linux-gnu/debug/foo` -Hello, world! -``` - -Here we recompiled the standard library in debug mode with debug assertions -(like `src/main.rs` is compiled) and everything was linked together at the end. - -Using `-Z build-std` will implicitly compile the stable crates `core`, `std`, -`alloc`, and `proc_macro`. If you're using `cargo test` it will also compile the -`test` crate. If you're working with an environment which does not support some -of these crates, then you can pass an argument to `-Zbuild-std` as well: - -```console -$ cargo +nightly build -Z build-std=core,alloc -``` - -The value here is a comma-separated list of standard library crates to build. - -#### Requirements - -As a summary, a list of requirements today to use `-Z build-std` are: - -* You must install libstd's source code through `rustup component add rust-src` -* You must pass `--target` -* You must use both a nightly Cargo and a nightly rustc -* The `-Z build-std` flag must be passed to all `cargo` invocations. - -#### Reporting bugs and helping out - -The `-Z build-std` feature is in the very early stages of development! This -feature for Cargo has an extremely long history and is very large in scope, and -this is just the beginning. If you'd like to report bugs please either report -them to: - -* Cargo - - for implementation bugs -* The tracking repository - - - for larger design - questions. - -Also if you'd like to see a feature that's not yet implemented and/or if -something doesn't quite work the way you'd like it to, feel free to check out -the [issue tracker](https://github.com/rust-lang/wg-cargo-std-aware/issues) of -the tracking repository, and if it's not there please file a new issue! 
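Since `cargo test` also builds the `test` crate under this flag, a sketch of running a test suite against a locally built standard library (same target requirement as above):

```console
$ cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu
```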
- -### build-std-features -* Tracking Repository: - -This flag is a sibling to the `-Zbuild-std` feature flag. This will configure -the features enabled for the standard library itself when building the standard -library. The default enabled features, at this time, are `backtrace` and -`panic_unwind`. This flag expects a comma-separated list and, if provided, will -override the default list of features enabled. - -### timings -* Tracking Issue: [#7405](https://github.com/rust-lang/cargo/issues/7405) - -The `timings` feature gives some information about how long each compilation -takes, and tracks concurrency information over time. - -```sh -cargo +nightly build -Z timings -``` - -The `-Ztimings` flag can optionally take a comma-separated list of the -following values: - -- `html` โ€” Saves a file called `cargo-timing.html` to the current directory - with a report of the compilation. Files are also saved with a timestamp in - the filename if you want to look at older runs. -- `info` โ€” Displays a message to stdout after each compilation finishes with - how long it took. -- `json` โ€” Emits some JSON information about timing information. - -The default if none are specified is `html,info`. - -#### Reading the graphs - -There are two graphs in the output. The "unit" graph shows the duration of -each unit over time. A "unit" is a single compiler invocation. There are lines -that show which additional units are "unlocked" when a unit finishes. That is, -it shows the new units that are now allowed to run because their dependencies -are all finished. Hover the mouse over a unit to highlight the lines. This can -help visualize the critical path of dependencies. This may change between runs -because the units may finish in different orders. - -The "codegen" times are highlighted in a lavender color. In some cases, build -pipelining allows units to start when their dependencies are performing code -generation. This information is not always displayed (for example, binary -units do not show when code generation starts). - -The "custom build" units are `build.rs` scripts, which when run are -highlighted in orange. - -The second graph shows Cargo's concurrency over time. The background -indicates CPU usage. The three lines are: -- "Waiting" (red) โ€” This is the number of units waiting for a CPU slot to - open. -- "Inactive" (blue) โ€” This is the number of units that are waiting for their - dependencies to finish. -- "Active" (green) โ€” This is the number of units currently running. - -Note: This does not show the concurrency in the compiler itself. `rustc` -coordinates with Cargo via the "job server" to stay within the concurrency -limit. This currently mostly applies to the code generation phase. - -Tips for addressing compile times: -- Look for slow dependencies. - - Check if they have features that you may wish to consider disabling. - - Consider trying to remove the dependency completely. -- Look for a crate being built multiple times with different versions. Try to - remove the older versions from the dependency graph. -- Split large crates into smaller pieces. -- If there are a large number of crates bottlenecked on a single crate, focus - your attention on improving that one crate to improve parallelism. 
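Tying the report options above together, a sketch invocation that emits only the HTML report:

```console
# Writes cargo-timing.html to the current directory; no stdout summary.
$ cargo +nightly build -Z timings=html
```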
- -### binary-dep-depinfo -* Tracking rustc issue: [#63012](https://github.com/rust-lang/rust/issues/63012) - -The `-Z binary-dep-depinfo` flag causes Cargo to forward the same flag to -`rustc` which will then cause `rustc` to include the paths of all binary -dependencies in the "dep info" file (with the `.d` extension). Cargo then uses -that information for change-detection (if any binary dependency changes, then -the crate will be rebuilt). The primary use case is for building the compiler -itself, which has implicit dependencies on the standard library that would -otherwise be untracked for change-detection. - -### panic-abort-tests -* Tracking Issue: [#67650](https://github.com/rust-lang/rust/issues/67650) -* Original Pull Request: [#7460](https://github.com/rust-lang/cargo/pull/7460) - -The `-Z panic-abort-tests` flag will enable nightly support to compile test -harness crates with `-Cpanic=abort`. Without this flag Cargo will compile tests, -and everything they depend on, with `-Cpanic=unwind` because it's the only way -`test`-the-crate knows how to operate. As of [rust-lang/rust#64158], however, -the `test` crate supports `-C panic=abort` with a test-per-process, and can help -avoid compiling crate graphs multiple times. - -It's currently unclear how this feature will be stabilized in Cargo, but we'd -like to stabilize it somehow! - -[rust-lang/rust#64158]: https://github.com/rust-lang/rust/pull/64158 - -### crate-type -* Tracking Issue: [#10083](https://github.com/rust-lang/cargo/issues/10083) -* RFC: [#3180](https://github.com/rust-lang/rfcs/pull/3180) -* Original Pull Request: [#10093](https://github.com/rust-lang/cargo/pull/10093) - -`cargo rustc --crate-type=lib,cdylib` forwards the `--crate-type` flag to `rustc`. -This runs `rustc` with the corresponding -[`--crate-type`](https://doc.rust-lang.org/rustc/command-line-arguments.html#--crate-type-a-list-of-types-of-crates-for-the-compiler-to-emit) -flag, and compiling. - -When using it, it requires the `-Z unstable-options` -command-line option: - -```console -cargo rustc --crate-type lib,cdylib -Z unstable-options -``` - -### config-cli -* Tracking Issue: [#7722](https://github.com/rust-lang/cargo/issues/7722) - -The `--config` CLI option allows arbitrary config values to be passed -in via the command-line. The argument should be in TOML syntax of KEY=VALUE: - -```console -cargo +nightly -Zunstable-options --config net.git-fetch-with-cli=true fetch -``` - -The `--config` option may be specified multiple times, in which case the -values are merged in left-to-right order, using the same merging logic that -multiple config files use. CLI values take precedence over environment -variables, which take precedence over config files. - -Some examples of what it looks like using Bourne shell syntax: - -```console -# Most shells will require escaping. -cargo --config http.proxy=\"http://example.com\" โ€ฆ - -# Spaces may be used. -cargo --config "net.git-fetch-with-cli = true" โ€ฆ - -# TOML array example. Single quotes make it easier to read and write. -cargo --config 'build.rustdocflags = ["--html-in-header", "header.html"]' โ€ฆ - -# Example of a complex TOML key. -cargo --config "target.'cfg(all(target_arch = \"arm\", target_os = \"none\"))'.runner = 'my-runner'" โ€ฆ - -# Example of overriding a profile setting. 
-cargo --config profile.dev.package.image.opt-level=3 โ€ฆ -``` - -### config-include -* Tracking Issue: [#7723](https://github.com/rust-lang/cargo/issues/7723) - -The `include` key in a config file can be used to load another config file. It -takes a string for a path to another file relative to the config file, or a -list of strings. It requires the `-Zconfig-include` command-line option. - -```toml -# .cargo/config -include = '../../some-common-config.toml' -``` - -The config values are first loaded from the include path, and then the config -file's own values are merged on top of it. - -This can be paired with [config-cli](#config-cli) to specify a file to load -from the command-line. Pass a path to a config file as the argument to -`--config`: - -```console -cargo +nightly -Zunstable-options -Zconfig-include --config somefile.toml build -``` - -CLI paths are relative to the current working directory. - -### target-applies-to-host -* Original Pull Request: [#9322](https://github.com/rust-lang/cargo/pull/9322) -* Tracking Issue: [#9453](https://github.com/rust-lang/cargo/issues/9453) - -The `target-applies-to-host` key in a config file can be used set the desired -behavior for passing target config flags to build scripts. - -It requires the `-Ztarget-applies-to-host` command-line option. - -The current default for `target-applies-to-host` is `true`, which will be -changed to `false` in the future, if `-Zhost-config` is used the new `false` -default will be set for `target-applies-to-host`. - -```toml -# config.toml -target-applies-to-host = false -``` - -```console -cargo +nightly -Ztarget-applies-to-host build --target x86_64-unknown-linux-gnu -``` - -### host-config -* Original Pull Request: [#9322](https://github.com/rust-lang/cargo/pull/9322) -* Tracking Issue: [#9452](https://github.com/rust-lang/cargo/issues/9452) - -The `host` key in a config file can be used pass flags to host build targets -such as build scripts that must run on the host system instead of the target -system when cross compiling. It supports both generic and host arch specific -tables. Matching host arch tables take precedence over generic host tables. - -It requires the `-Zhost-config` and `-Ztarget-applies-to-host` command-line -options to be set. - -```toml -# config.toml -[host] -linker = "/path/to/host/linker" -[host.x86_64-unknown-linux-gnu] -linker = "/path/to/host/arch/linker" -[target.x86_64-unknown-linux-gnu] -linker = "/path/to/target/linker" -``` - -The generic `host` table above will be entirely ignored when building on a -`x86_64-unknown-linux-gnu` host as the `host.x86_64-unknown-linux-gnu` table -takes precedence. - -Setting `-Zhost-config` changes the default for `target-applies-to-host` to -`false` from `true`. - -```console -cargo +nightly -Ztarget-applies-to-host -Zhost-config build --target x86_64-unknown-linux-gnu -``` - -### unit-graph -* Tracking Issue: [#8002](https://github.com/rust-lang/cargo/issues/8002) - -The `--unit-graph` flag can be passed to any build command (`build`, `check`, -`run`, `test`, `bench`, `doc`, etc.) to emit a JSON object to stdout which -represents Cargo's internal unit graph. Nothing is actually built, and the -command returns immediately after printing. Each "unit" corresponds to an -execution of the compiler. These objects also include which unit each unit -depends on. - -``` -cargo +nightly build --unit-graph -Z unstable-options -``` - -This structure provides a more complete view of the dependency relationship as -Cargo sees it. 
In particular, the "features" field supports the new feature -resolver where a dependency can be built multiple times with different -features. `cargo metadata` fundamentally cannot represent the relationship of -features between different dependency kinds, and features now depend on which -command is run and which packages and targets are selected. Additionally it -can provide details about intra-package dependencies like build scripts or -tests. - -The following is a description of the JSON structure: - -```javascript -{ - /* Version of the JSON output structure. If any backwards incompatible - changes are made, this value will be increased. - */ - "version": 1, - /* Array of all build units. */ - "units": [ - { - /* An opaque string which indicates the package. - Information about the package can be obtained from `cargo metadata`. - */ - "pkg_id": "my-package 0.1.0 (path+file:///path/to/my-package)", - /* The Cargo target. See the `cargo metadata` documentation for more - information about these fields. - https://doc.rust-lang.org/cargo/commands/cargo-metadata.html - */ - "target": { - "kind": ["lib"], - "crate_types": ["lib"], - "name": "my-package", - "src_path": "/path/to/my-package/src/lib.rs", - "edition": "2018", - "test": true, - "doctest": true - }, - /* The profile settings for this unit. - These values may not match the profile defined in the manifest. - Units can use modified profile settings. For example, the "panic" - setting can be overridden for tests to force it to "unwind". - */ - "profile": { - /* The profile name these settings are derived from. */ - "name": "dev", - /* The optimization level as a string. */ - "opt_level": "0", - /* The LTO setting as a string. */ - "lto": "false", - /* The codegen units as an integer. - `null` if it should use the compiler's default. - */ - "codegen_units": null, - /* The debug information level as an integer. - `null` if it should use the compiler's default (0). - */ - "debuginfo": 2, - /* Whether or not debug-assertions are enabled. */ - "debug_assertions": true, - /* Whether or not overflow-checks are enabled. */ - "overflow_checks": true, - /* Whether or not rpath is enabled. */ - "rpath": false, - /* Whether or not incremental is enabled. */ - "incremental": true, - /* The panic strategy, "unwind" or "abort". */ - "panic": "unwind" - }, - /* Which platform this target is being built for. - A value of `null` indicates it is for the host. - Otherwise it is a string of the target triple (such as - "x86_64-unknown-linux-gnu"). - */ - "platform": null, - /* The "mode" for this unit. Valid values: - - * "test" โ€” Build using `rustc` as a test. - * "build" โ€” Build using `rustc`. - * "check" โ€” Build using `rustc` in "check" mode. - * "doc" โ€” Build using `rustdoc`. - * "doctest" โ€” Test using `rustdoc`. - * "run-custom-build" โ€” Represents the execution of a build script. - */ - "mode": "build", - /* Array of features enabled on this unit as strings. */ - "features": ["somefeat"], - /* Whether or not this is a standard-library unit, - part of the unstable build-std feature. - If not set, treat as `false`. - */ - "is_std": false, - /* Array of dependencies of this unit. */ - "dependencies": [ - { - /* Index in the "units" array for the dependency. */ - "index": 1, - /* The name that this dependency will be referred as. */ - "extern_crate_name": "unicode_xid", - /* Whether or not this dependency is "public", - part of the unstable public-dependency feature. - If not set, the public-dependency feature is not enabled. 
-          */
-          "public": false,
-          /* Whether or not this dependency is injected into the prelude,
-             currently used by the build-std feature.
-             If not set, treat as `false`.
-          */
-          "noprelude": false
-        }
-      ]
-    },
-    // ...
-  ],
-  /* Array of indices in the "units" array that are the "roots" of the
-     dependency graph.
-  */
-  "roots": [0],
-}
-```
-
-### rustdoc-map
-* Tracking Issue: [#8296](https://github.com/rust-lang/cargo/issues/8296)
-
-This feature adds configuration settings that are passed to `rustdoc` so that
-it can generate links to dependencies whose documentation is hosted elsewhere
-when the dependency is not documented. First, add this to `.cargo/config`:
-
-```toml
-[doc.extern-map.registries]
-crates-io = "https://docs.rs/"
-```
-
-Then, when building documentation, use the following flags to cause links
-to dependencies to link to [docs.rs](https://docs.rs/):
-
-```
-cargo +nightly doc --no-deps -Zrustdoc-map
-```
-
-The `registries` table contains a mapping of registry name to the URL to link
-to. The URL may have the markers `{pkg_name}` and `{version}` which will get
-replaced with the corresponding values. If neither is specified, then Cargo
-defaults to appending `{pkg_name}/{version}/` to the end of the URL.
-
-Another config setting is available to redirect standard library links. By
-default, rustdoc creates links to <https://doc.rust-lang.org/nightly/>. To
-change this behavior, use the `doc.extern-map.std` setting:
-
-```toml
-[doc.extern-map]
-std = "local"
-```
-
-A value of `"local"` means to link to the documentation found in the `rustc`
-sysroot. If you are using rustup, this documentation can be installed with
-`rustup component add rust-docs`.
-
-The default value is `"remote"`.
-
-The value may also take a URL for a custom location.
-
-### terminal-width
-
-* Tracking Issue: [#84673](https://github.com/rust-lang/rust/issues/84673)
-
-This feature provides a new flag, `-Z terminal-width`, which is used to pass
-a terminal width to `rustc` so that error messages containing long lines
-can be intelligently truncated.
-
-For example, passing `-Z terminal-width=20` (an arbitrarily low value) might
-produce the following error:
-
-```text
-error[E0308]: mismatched types
-  --> src/main.rs:2:17
-  |
-2 | ..._: () = 42;
-  |        -- ^^ expected `()`, found integer
-  |        |
-  |        expected due to this
-
-error: aborting due to previous error
-```
-
-In contrast, without `-Z terminal-width`, the error would look as shown below:
-
-```text
-error[E0308]: mismatched types
- --> src/main.rs:2:17
-  |
-2 |     let _: () = 42;
-  |            -- ^^ expected `()`, found integer
-  |            |
-  |            expected due to this
-
-error: aborting due to previous error
-```
-
-### Weak dependency features
-* Tracking Issue: [#8832](https://github.com/rust-lang/cargo/issues/8832)
-
-The `-Z weak-dep-features` command-line option enables the ability to use
-`dep_name?/feat_name` syntax in the `[features]` table. The `?` indicates that
-the optional dependency `dep_name` will not be automatically enabled. The
-feature `feat_name` will only be added if something else enables the
-`dep_name` dependency.
-
-Example:
-
-```toml
-[dependencies]
-serde = { version = "1.0.117", optional = true, default-features = false }
-
-[features]
-std = ["serde?/std"]
-```
-
-In this example, the `std` feature enables the `std` feature on the `serde`
-dependency. However, unlike the normal `serde/std` syntax, it will not enable
-the optional dependency `serde` unless something else has included it.
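-
-As a rough, untested sketch of how this might be exercised with the example
-manifest above: building with only the `std` feature leaves the optional
-`serde` dependency out of the build, while also enabling `serde` causes
-`serde/std` to be turned on as well.
-
-```console
-# `serde` stays disabled; `serde?/std` has no effect.
-cargo +nightly build -Z weak-dep-features --features std
-
-# Enabling the optional dependency too now also enables `serde/std`.
-cargo +nightly build -Z weak-dep-features --features std,serde
-```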
-
-### per-package-target
-* Tracking Issue: [#9406](https://github.com/rust-lang/cargo/pull/9406)
-* Original Pull Request: [#9030](https://github.com/rust-lang/cargo/pull/9030)
-* Original Issue: [#7004](https://github.com/rust-lang/cargo/pull/7004)
-
-The `per-package-target` feature adds two keys to the manifest:
-`package.default-target` and `package.forced-target`. The first causes
-the package to be compiled by default (i.e., when no `--target` argument is
-passed) for a given target. The second forces the package to always be
-compiled for the given target.
-
-Example:
-
-```toml
-[package]
-forced-target = "wasm32-unknown-unknown"
-```
-
-In this example, the crate is always built for
-`wasm32-unknown-unknown`, for instance because it is going to be used
-as a plugin for a main program that runs on the host target (or on a
-target provided on the command line).
-
-### credential-process
-* Tracking Issue: [#8933](https://github.com/rust-lang/cargo/issues/8933)
-* RFC: [#2730](https://github.com/rust-lang/rfcs/pull/2730)
-
-The `credential-process` feature adds a config setting to fetch registry
-authentication tokens by calling an external process.
-
-Token authentication is used by the [`cargo login`], [`cargo publish`],
-[`cargo owner`], and [`cargo yank`] commands. Additionally, this feature adds
-a new `cargo logout` command.
-
-To use this feature, you must pass the `-Z credential-process` flag on the
-command-line. Additionally, you must remove any tokens currently saved
-in the [`credentials` file] (which can be done with the new `logout` command).
-
-#### `credential-process` Configuration
-
-To configure which process to run to fetch the token, specify the process in
-the `registry` table in a [config file]:
-
-```toml
-[registry]
-credential-process = "/usr/bin/cargo-creds"
-```
-
-If you want to use a different process for a specific registry, it can be
-specified in the `registries` table:
-
-```toml
-[registries.my-registry]
-credential-process = "/usr/bin/cargo-creds"
-```
-
-The value can be a string with spaces separating arguments or it can be a TOML
-array of strings.
-
-Command-line arguments allow special placeholders which will be replaced with
-the corresponding value:
-
-* `{name}` — The name of the registry.
-* `{api_url}` — The base URL of the registry API endpoints.
-* `{action}` — The authentication action (described below).
-
-Process names with the prefix `cargo:` are loaded from the `libexec` directory
-next to cargo. Several experimental credential wrappers are included with
-Cargo, and this provides convenient access to them:
-
-```toml
-[registry]
-credential-process = "cargo:macos-keychain"
-```
-
-The current wrappers are:
-
-* `cargo:macos-keychain`: Uses the macOS Keychain to store the token.
-* `cargo:wincred`: Uses the Windows Credential Manager to store the token.
-* `cargo:1password`: Uses the 1password `op` CLI to store the token. You must
-  install the `op` CLI from the [1password
-  website](https://1password.com/downloads/command-line/). You must run `op
-  signin` at least once with the appropriate arguments (such as `op signin
-  my.1password.com user@example.com`), unless you provide the sign-in-address
-  and email arguments. The master password will be required on each request
-  unless the appropriate `OP_SESSION` environment variable is set. It supports
-  the following command-line arguments:
-  * `--account`: The account shorthand name to use.
-  * `--vault`: The vault name to use.
-  * `--sign-in-address`: The sign-in-address, which is a web address such as `my.1password.com`.
-  * `--email`: The email address to sign in with.
-
-A wrapper is available for GNOME
-[libsecret](https://wiki.gnome.org/Projects/Libsecret) to store tokens on
-Linux systems. Due to build limitations, this wrapper is not available as a
-pre-compiled binary. It can be built and installed manually. First, install
-libsecret using your system package manager (for example, `sudo apt install
-libsecret-1-dev`). Then build and install the wrapper with `cargo install
-cargo-credential-gnome-secret`.
-In the config, use a path to the binary like this:
-
-```toml
-[registry]
-credential-process = "cargo-credential-gnome-secret {action}"
-```
-
-#### `credential-process` Interface
-
-There are two different kinds of token processes that Cargo supports. The
-simple "basic" kind will only be called by Cargo when it needs a token. This
-is intended for simple and easy integration with password managers that can
-often use pre-existing tooling. The more advanced "Cargo" kind supports
-different actions passed as a command-line argument. This is intended for a
-more pleasant integration experience, at the expense of requiring a
-Cargo-specific process to glue to the password manager. Cargo will determine
-which kind is supported by the `credential-process` definition. If it contains
-the `{action}` argument, then it uses the advanced style; otherwise it assumes
-it only supports the "basic" kind.
-
-##### Basic authenticator
-
-A basic authenticator is a process that returns a token on stdout. Newlines
-will be trimmed. The process inherits the user's stdin and stderr. It should
-exit 0 on success, and nonzero on error.
-
-With this form, [`cargo login`] and `cargo logout` are not supported and
-return an error if used.
-
-##### Cargo authenticator
-
-The protocol between Cargo and the process is very basic, intended to
-ensure the credential process is kept as simple as possible. Cargo will
-execute the process with the `{action}` argument indicating which action to
-perform:
-
-* `store` — Store the given token in secure storage.
-* `get` — Get a token from storage.
-* `erase` — Remove a token from storage.
-
-The `cargo login` command uses `store` to save a token. Commands that require
-authentication, like `cargo publish`, use `get` to retrieve a token. `cargo
-logout` uses the `erase` command to remove a token.
-
-The process inherits the user's stderr, so the process can display messages.
-Some values are passed in via environment variables (see below). The expected
-interactions are:
-
-* `store` — The token is sent to the process's stdin, terminated by a newline.
-  The process should store the token keyed off the registry name. If the
-  process fails, it should exit with a nonzero exit status.
-
-* `get` — The process should send the token to its stdout (trailing newline
-  will be trimmed). The process inherits the user's stdin, should it need to
-  receive input.
-
-  If the process is unable to fulfill the request, it should exit with a
-  nonzero exit code.
-
-* `erase` — The process should remove the token associated with the registry
-  name. If the token is not found, the process should exit with a 0 exit
-  status.
-
-##### Environment
-
-The following environment variables will be provided to the executed command:
-
-* `CARGO` — Path to the `cargo` binary executing the command.
-* `CARGO_REGISTRY_NAME` — Name of the registry the authentication token is for.
-* `CARGO_REGISTRY_API_URL` โ€” The URL of the registry API. - -#### `cargo logout` - -A new `cargo logout` command has been added to make it easier to remove a -token from storage. This supports both [`credentials` file] tokens and -`credential-process` tokens. - -When used with `credentials` file tokens, it needs the `-Z unstable-options` -command-line option: - -```console -cargo logout -Z unstable-options -``` - -When used with the `credential-process` config, use the `-Z -credential-process` command-line option: - - -```console -cargo logout -Z credential-process -``` - -[`cargo login`]: ../commands/cargo-login.md -[`cargo publish`]: ../commands/cargo-publish.md -[`cargo owner`]: ../commands/cargo-owner.md -[`cargo yank`]: ../commands/cargo-yank.md -[`credentials` file]: config.md#credentials -[crates.io]: https://crates.io/ -[config file]: config.md - -### `cargo config` - -* Original Issue: [#2362](https://github.com/rust-lang/cargo/issues/2362) -* Tracking Issue: [#9301](https://github.com/rust-lang/cargo/issues/9301) - -The `cargo config` subcommand provides a way to display the configuration -files that cargo loads. It currently includes the `get` subcommand which -can take an optional config value to display. - -```console -cargo +nightly -Zunstable-options config get build.rustflags -``` - -If no config value is included, it will display all config values. See the -`--help` output for more options available. - -### `doctest-in-workspace` - -* Tracking Issue: [#9427](https://github.com/rust-lang/cargo/issues/9427) - -The `-Z doctest-in-workspace` flag changes the behavior of the current working -directory used when running doctests. Historically, Cargo has run `rustdoc ---test` relative to the root of the package, with paths relative from that -root. However, this is inconsistent with how `rustc` and `rustdoc` are -normally run in a workspace, where they are run relative to the workspace -root. This inconsistency causes problems in various ways, such as when passing -RUSTDOCFLAGS with relative paths, or dealing with diagnostic output. - -The `-Z doctest-in-workspace` flag causes cargo to switch to running `rustdoc` -from the root of the workspace. It also passes the `--test-run-directory` to -`rustdoc` so that when *running* the tests, they are run from the root of the -package. This preserves backwards compatibility and is consistent with how -normal unittests are run. - -### rustc `--print` - -* Tracking Issue: [#9357](https://github.com/rust-lang/cargo/issues/9357) - -`cargo rustc --print=VAL` forwards the `--print` flag to `rustc` in order to -extract information from `rustc`. This runs `rustc` with the corresponding -[`--print`](https://doc.rust-lang.org/rustc/command-line-arguments.html#--print-print-compiler-information) -flag, and then immediately exits without compiling. Exposing this as a cargo -flag allows cargo to inject the correct target and RUSTFLAGS based on the -current configuration. - -The primary use case is to run `cargo rustc --print=cfg` to get config values -for the appropriate target and influenced by any other RUSTFLAGS. - - -### Different binary name - -* Tracking Issue: [#9778](https://github.com/rust-lang/cargo/issues/9778) -* PR: [#9627](https://github.com/rust-lang/cargo/pull/9627) - -The `different-binary-name` feature allows setting the filename of the binary without having to obey the -restrictions placed on crate names. For example, the crate name must use only `alphanumeric` characters -or `-` or `_`, and cannot be empty. 
- -The `filename` parameter should **not** include the binary extension, `cargo` will figure out the appropriate -extension and use that for the binary on its own. - -The `filename` parameter is only available in the `[[bin]]` section of the manifest. - -```toml -cargo-features = ["different-binary-name"] - -[project] -name = "foo" -version = "0.0.1" - -[[bin]] -name = "foo" -filename = "007bar" -path = "src/main.rs" -``` - -### scrape-examples - -* RFC: [#3123](https://github.com/rust-lang/rfcs/pull/3123) -* Tracking Issue: [#9910](https://github.com/rust-lang/cargo/issues/9910) - -The `-Z rustdoc-scrape-examples` argument tells Rustdoc to search crates in the current workspace -for calls to functions. Those call-sites are then included as documentation. The flag can take an -argument of `all` or `examples` which configures which crate in the workspace to analyze for examples. -For instance: - -``` -cargo doc -Z unstable-options -Z rustdoc-scrape-examples=examples -``` - -## Stabilized and removed features - -### Compile progress - -The compile-progress feature has been stabilized in the 1.30 release. -Progress bars are now enabled by default. -See [`term.progress`](config.md#termprogresswhen) for more information about -controlling this feature. - -### Edition - -Specifying the `edition` in `Cargo.toml` has been stabilized in the 1.31 release. -See [the edition field](manifest.md#the-edition-field) for more information -about specifying this field. - -### rename-dependency - -Specifying renamed dependencies in `Cargo.toml` has been stabilized in the 1.31 release. -See [renaming dependencies](specifying-dependencies.md#renaming-dependencies-in-cargotoml) -for more information about renaming dependencies. - -### Alternate Registries - -Support for alternate registries has been stabilized in the 1.34 release. -See the [Registries chapter](registries.md) for more information about alternate registries. - -### Offline Mode - -The offline feature has been stabilized in the 1.36 release. -See the [`--offline` flag](../commands/cargo.md#option-cargo---offline) for -more information on using the offline mode. - -### publish-lockfile - -The `publish-lockfile` feature has been removed in the 1.37 release. -The `Cargo.lock` file is always included when a package is published if the -package contains a binary target. `cargo install` requires the `--locked` flag -to use the `Cargo.lock` file. -See [`cargo package`](../commands/cargo-package.md) and -[`cargo install`](../commands/cargo-install.md) for more information. - -### default-run - -The `default-run` feature has been stabilized in the 1.37 release. -See [the `default-run` field](manifest.md#the-default-run-field) for more -information about specifying the default target to run. - -### cache-messages - -Compiler message caching has been stabilized in the 1.40 release. -Compiler warnings are now cached by default and will be replayed automatically -when re-running Cargo. - -### install-upgrade - -The `install-upgrade` feature has been stabilized in the 1.41 release. -[`cargo install`] will now automatically upgrade packages if they appear to be -out-of-date. See the [`cargo install`] documentation for more information. - -[`cargo install`]: ../commands/cargo-install.md - -### Profile Overrides - -Profile overrides have been stabilized in the 1.41 release. -See [Profile Overrides](profiles.md#overrides) for more information on using -overrides. 
- -### Config Profiles - -Specifying profiles in Cargo config files and environment variables has been -stabilized in the 1.43 release. -See the [config `[profile]` table](config.md#profile) for more information -about specifying [profiles](profiles.md) in config files. - -### crate-versions - -The `-Z crate-versions` flag has been stabilized in the 1.47 release. -The crate version is now automatically included in the -[`cargo doc`](../commands/cargo-doc.md) documentation sidebar. - -### Features - -The `-Z features` flag has been stabilized in the 1.51 release. -See [feature resolver version 2](features.md#feature-resolver-version-2) -for more information on using the new feature resolver. - -### package-features - -The `-Z package-features` flag has been stabilized in the 1.51 release. -See the [resolver version 2 command-line flags](features.md#resolver-version-2-command-line-flags) -for more information on using the features CLI options. - -### Resolver - -The `resolver` feature in `Cargo.toml` has been stabilized in the 1.51 release. -See the [resolver versions](resolver.md#resolver-versions) for more -information about specifying resolvers. - -### extra-link-arg - -The `extra-link-arg` feature to specify additional linker arguments in build -scripts has been stabilized in the 1.56 release. See the [build script -documentation](build-scripts.md#outputs-of-the-build-script) for more -information on specifying extra linker arguments. - -### configurable-env - -The `configurable-env` feature to specify environment variables in Cargo -configuration has been stabilized in the 1.56 release. See the [config -documentation](config.html#env) for more information about configuring -environment variables. - -### rust-version - -The `rust-version` field in `Cargo.toml` has been stabilized in the 1.56 release. -See the [rust-version field](manifest.html#the-rust-version-field) for more -information on using the `rust-version` field and the `--ignore-rust-version` option. - -### codegen-backend - -The `codegen-backend` feature makes it possible to select the codegen backend used by rustc using a -profile. - -Example: - -```toml -[package] -name = "foo" - -[dependencies] -serde = "1.0.117" - -[profile.dev.package.foo] -codegen-backend = "cranelift" -``` - -### patch-in-config - -The `-Z patch-in-config` flag, and the corresponding support for -`[patch]` section in Cargo configuration files has been stabilized in -the 1.56 release. See the [patch field](config.html#patch) for more -information. - -### edition 2021 - -The 2021 edition has been stabilized in the 1.56 release. -See the [`edition` field](manifest.md#the-edition-field) for more information on setting the edition. -See [`cargo fix --edition`](../commands/cargo-fix.md) and [The Edition Guide](../../edition-guide/index.html) for more information on migrating existing projects. - - -### Custom named profiles - -Custom named profiles have been stabilized in the 1.57 release. See the -[profiles chapter](profiles.md#custom-profiles) for more information. - -### Profile `strip` option - -The profile `strip` option has been stabilized in the 1.59 release. See the -[profiles chapter](profiles.md#strip) for more information. - -### Future incompat report - -Support for generating a future-incompat report has been stabilized -in the 1.59 release. See the [future incompat report chapter](future-incompat-report.md) -for more information. 
diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/workspaces.md b/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/workspaces.md
deleted file mode 100644
index 97472e352..000000000
--- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/src/reference/workspaces.md
+++ /dev/null
@@ -1,124 +0,0 @@
-## Workspaces
-
-A *workspace* is a collection of one or more packages that share common
-dependency resolution (with a shared `Cargo.lock`), output directory, and
-various settings such as profiles. Packages that are part of a workspace are
-called *workspace members*. There are two flavours of workspaces: a root
-package or a virtual manifest.
-
-### Root package
-
-A workspace can be created by adding a [`[workspace]`
-section](#the-workspace-section) to `Cargo.toml`. This can be added to a
-`Cargo.toml` that already defines a `[package]`, in which case the package is
-the *root package* of the workspace. The *workspace root* is the directory
-where the workspace's `Cargo.toml` is located.
-
-### Virtual manifest
-
-Alternatively, a `Cargo.toml` file can be created with a `[workspace]` section
-but without a [`[package]` section][package]. This is called a *virtual
-manifest*. This is typically useful when there isn't a "primary" package, or
-you want to keep all the packages organized in separate directories.
-
-### Key features
-
-The key points of workspaces are:
-
-* All packages share a common `Cargo.lock` file which resides in the
-  *workspace root*.
-* All packages share a common [output directory], which defaults to a
-  directory named `target` in the *workspace root*.
-* The [`[patch]`][patch], [`[replace]`][replace] and [`[profile.*]`][profiles]
-  sections in `Cargo.toml` are only recognized in the *root* manifest, and
-  ignored in member crates' manifests.
-
-### The `[workspace]` section
-
-The `[workspace]` table in `Cargo.toml` defines which packages are members of
-the workspace:
-
-```toml
-[workspace]
-members = ["member1", "path/to/member2", "crates/*"]
-exclude = ["crates/foo", "path/to/other"]
-```
-
-All [`path` dependencies] residing in the workspace directory automatically
-become members. Additional members can be listed with the `members` key, which
-should be an array of strings containing directories with `Cargo.toml` files.
-
-The `members` list also supports [globs] to match multiple paths, using
-typical filename glob patterns like `*` and `?`.
-
-The `exclude` key can be used to prevent paths from being included in a
-workspace. This can be useful if some path dependencies aren't desired to be
-in the workspace at all, or to leave out a directory that a glob pattern
-would otherwise include.
-
-An empty `[workspace]` table can be used with a `[package]` to conveniently
-create a workspace with the package and all of its path dependencies.
-
-### Workspace selection
-
-When inside a subdirectory within the workspace, Cargo will automatically
-search the parent directories for a `Cargo.toml` file with a `[workspace]`
-definition to determine which workspace to use. The [`package.workspace`]
-manifest key can be used in member crates to point at a workspace's root to
-override this automatic search. The manual setting can be useful if the member
-is not inside a subdirectory of the workspace root.
-
-### Package selection
-
-In a workspace, package-related cargo commands like [`cargo build`] can use
-the `-p` / `--package` or `--workspace` command-line flags to determine which
-packages to operate on.
If neither of those flags are specified, Cargo will -use the package in the current working directory. If the current directory is -a virtual workspace, it will apply to all members (as if `--workspace` were -specified on the command-line). - -The optional `default-members` key can be specified to set the members to -operate on when in the workspace root and the package selection flags are not -used: - -```toml -[workspace] -members = ["path/to/member1", "path/to/member2", "path/to/member3/*"] -default-members = ["path/to/member2", "path/to/member3/foo"] -``` - -When specified, `default-members` must expand to a subset of `members`. - -### The `workspace.metadata` table - -The `workspace.metadata` table is ignored by Cargo and will not be warned -about. This section can be used for tools that would like to store workspace -configuration in `Cargo.toml`. For example: - -```toml -[workspace] -members = ["member1", "member2"] - -[workspace.metadata.webcontents] -root = "path/to/webproject" -tool = ["npm", "run", "build"] -# ... -``` - -There is a similar set of tables at the package level at -[`package.metadata`][package-metadata]. While cargo does not specify a -format for the content of either of these tables, it is suggested that -external tools may wish to use them in a consistent fashion, such as referring -to the data in `workspace.metadata` if data is missing from `package.metadata`, -if that makes sense for the tool in question. - -[package]: manifest.md#the-package-section -[package-metadata]: manifest.md#the-metadata-table -[output directory]: ../guide/build-cache.md -[patch]: overriding-dependencies.md#the-patch-section -[replace]: overriding-dependencies.md#the-replace-section -[profiles]: profiles.md -[`path` dependencies]: specifying-dependencies.md#specifying-path-dependencies -[`package.workspace`]: manifest.md#the-workspace-field -[globs]: https://docs.rs/glob/0.3.0/glob/struct.Pattern.html -[`cargo build`]: ../commands/cargo-build.md diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/theme/favicon.png b/collector/compile-benchmarks/cargo-0.60.0/src/doc/theme/favicon.png deleted file mode 100644 index 47c8f628f..000000000 Binary files a/collector/compile-benchmarks/cargo-0.60.0/src/doc/theme/favicon.png and /dev/null differ diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/doc/theme/head.hbs b/collector/compile-benchmarks/cargo-0.60.0/src/doc/theme/head.hbs deleted file mode 100644 index 062417e11..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/doc/theme/head.hbs +++ /dev/null @@ -1,5 +0,0 @@ - diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/_cargo b/collector/compile-benchmarks/cargo-0.60.0/src/etc/_cargo deleted file mode 100644 index 5356313b6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/_cargo +++ /dev/null @@ -1,426 +0,0 @@ -#compdef cargo - -autoload -U regexp-replace - -_cargo() { - local curcontext="$curcontext" ret=1 - local -a command_scope_spec common parallel features msgfmt triple target registry - local -a state line state_descr # These are set by _arguments - typeset -A opt_args - - common=( - '(-q --quiet)*'{-v,--verbose}'[use verbose output]' - '(-q --quiet -v --verbose)'{-q,--quiet}'[no output printed to stdout]' - '-Z+[pass unstable (nightly-only) flags to cargo]: :_cargo_unstable_flags' - '--frozen[require that Cargo.lock and cache are up-to-date]' - '--locked[require that Cargo.lock is up-to-date]' - '--color=[specify colorization option]:coloring:(auto always never)' - '(- 1 
*)'{-h,--help}'[show help message]' - ) - - # leading items in parentheses are an exclusion list for the arguments following that arg - # See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions - # - => exclude all other options - # 1 => exclude positional arg 1 - # * => exclude all other args - # +blah => exclude +blah - _arguments -s -S -C $common \ - '(- 1 *)--list[list installed commands]' \ - '(- 1 *)--explain=[provide a detailed explanation of an error message]:error code' \ - '(- 1 *)'{-V,--version}'[show version information]' \ - '(+beta +nightly)+stable[use the stable toolchain]' \ - '(+stable +nightly)+beta[use the beta toolchain]' \ - '(+stable +beta)+nightly[use the nightly toolchain]' \ - '1: :_cargo_cmds' \ - '*:: :->args' - - # These flags are mutually exclusive specifiers for the scope of a command; as - # they are used in multiple places without change, they are expanded into the - # appropriate command's `_arguments` where appropriate. - command_scope_spec=( - '(--bin --example --test --lib)--bench=[specify benchmark name]: :_cargo_benchmark_names' - '(--bench --bin --test --lib)--example=[specify example name]:example name:_cargo_example_names' - '(--bench --example --test --lib)--bin=[specify binary name]:binary name' - '(--bench --bin --example --test)--lib=[specify library name]:library name' - '(--bench --bin --example --lib)--test=[specify test name]:test name' - ) - - parallel=( - '(-j --jobs)'{-j+,--jobs=}'[specify number of parallel jobs]:jobs [# of CPUs]' - ) - - features=( - '(--all-features)--features=[specify features to activate]:feature' - '(--features)--all-features[activate all available features]' - "--no-default-features[don't build the default features]" - ) - - msgfmt='--message-format=[specify error format]:error format [human]:(human json short)' - triple='--target=[specify target triple]:target triple:_cargo_target_triple' - target='--target-dir=[specify directory for all generated artifacts]:directory:_directories' - manifest='--manifest-path=[specify path to manifest]:path:_directories' - registry='--registry=[specify registry to use]:registry' - - case $state in - args) - curcontext="${curcontext%:*}-${words[1]}:" - case ${words[1]} in - bench) - _arguments -s -A "^--" $common $parallel $features $msgfmt $triple $target $manifest \ - "${command_scope_spec[@]}" \ - '--all-targets[benchmark all targets]' \ - "--no-run[compile but don't run]" \ - '(-p --package)'{-p+,--package=}'[specify package to run benchmarks for]:package:_cargo_package_names' \ - '--exclude=[exclude packages from the benchmark]:spec' \ - '--no-fail-fast[run all benchmarks regardless of failure]' \ - '1: :_guard "^-*" "bench name"' \ - '*:args:_default' - ;; - - build | b) - _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ - '--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \ - "${command_scope_spec[@]}" \ - '(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \ - '--release[build in release mode]' \ - '--build-plan[output the build plan in JSON]' \ - ;; - - check | c) - _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ - '--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \ - "${command_scope_spec[@]}" \ - '(-p --package)'{-p+,--package=}'[specify package to check]:package:_cargo_package_names' \ - '--release[check in release mode]' \ - ;; - - clean) - _arguments -s -S $common 
$triple $target $manifest \ - '(-p --package)'{-p+,--package=}'[specify package to clean]:package:_cargo_package_names' \ - '--release[clean release artifacts]' \ - '--doc[clean just the documentation directory]' - ;; - - doc | d) - _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ - '--no-deps[do not build docs for dependencies]' \ - '--document-private-items[include non-public items in the documentation]' \ - '--open[open docs in browser after the build]' \ - '(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \ - '--release[build artifacts in release mode, with optimizations]' \ - ;; - - fetch) - _arguments -s -S $common $triple $manifest - ;; - - fix) - _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ - "${command_scope_spec[@]}" \ - '--broken-code[fix code even if it already has compiler errors]' \ - '--edition[fix in preparation for the next edition]' \ - '--edition-idioms[fix warnings to migrate to the idioms of an edition]' \ - '--allow-no-vcs[fix code even if a VCS was not detected]' \ - '--allow-dirty[fix code even if the working directory is dirty]' \ - '--allow-staged[fix code even if the working directory has staged changes]' - ;; - - generate-lockfile) - _arguments -s -S $common $manifest - ;; - - help) - _cargo_cmds - ;; - - init) - _arguments -s -S $common $registry \ - '--lib[use library template]' \ - '--edition=[specify edition to set for the crate generated]:edition:(2015 2018 2021)' \ - '--vcs=[initialize a new repo with a given VCS]:vcs:(git hg pijul fossil none)' \ - '--name=[set the resulting package name]:name' \ - '1:path:_directories' - ;; - - install) - _arguments -s -S $common $parallel $features $triple $registry \ - '(-f --force)'{-f,--force}'[force overwriting of existing crates or binaries]' \ - '--bin=[only install the specified binary]:binary' \ - '--branch=[branch to use when installing from git]:branch' \ - '--debug[build in debug mode instead of release mode]' \ - '--example=[install the specified example instead of binaries]:example:_cargo_example_names' \ - '--git=[specify URL from which to install the crate]:url:_urls' \ - '--path=[local filesystem path to crate to install]: :_directories' \ - '--rev=[specific commit to use when installing from git]:commit' \ - '--root=[directory to install packages into]: :_directories' \ - '--tag=[tag to use when installing from git]:tag' \ - '--vers=[version to install from crates.io]:version' \ - '--list[list all installed packages and their versions]' \ - '*: :_guard "^-*" "crate"' - ;; - - locate-project) - _arguments -s -S $common $manifest \ - '--message-format=[specify output representation]:output representation [json]:(json plain)' - '--workspace[locate Cargo.toml of the workspace root]' - ;; - - login) - _arguments -s -S $common $registry \ - '*: :_guard "^-*" "token"' - ;; - - metadata) - _arguments -s -S $common $features $manifest \ - "--no-deps[output information only about the root package and don't fetch dependencies]" \ - '--format-version=[specify format version]:version [1]:(1)' - ;; - - new) - _arguments -s -S $common $registry \ - '--lib[use library template]' \ - '--vcs:initialize a new repo with a given VCS:(git hg none)' \ - '--name=[set the resulting package name]' - ;; - - owner) - _arguments -s -S $common $registry \ - '(-a --add)'{-a,--add}'[specify name of a user or team to invite as an owner]:name' \ - '--index=[specify registry index]:index' \ - '(-l --list)'{-l,--list}'[list 
owners of a crate]' \ - '(-r --remove)'{-r,--remove}'[specify name of a user or team to remove as an owner]:name' \ - '--token=[specify API token to use when authenticating]:token' \ - '*: :_guard "^-*" "crate"' - ;; - - package) - _arguments -s -S $common $parallel $features $triple $target $manifest \ - '(-l --list)'{-l,--list}'[print files included in a package without making one]' \ - '--no-metadata[ignore warnings about a lack of human-usable metadata]' \ - '--allow-dirty[allow dirty working directories to be packaged]' \ - "--no-verify[don't build to verify contents]" - ;; - - pkgid) - _arguments -s -S $common $manifest \ - '(-p --package)'{-p+,--package=}'[specify package to get ID specifier for]:package:_cargo_package_names' \ - '*: :_guard "^-*" "spec"' - ;; - - publish) - _arguments -s -S $common $parallel $features $triple $target $manifest $registry \ - '--index=[specify registry index]:index' \ - '--allow-dirty[allow dirty working directories to be packaged]' \ - "--no-verify[don't verify the contents by building them]" \ - '--token=[specify token to use when uploading]:token' \ - '--dry-run[perform all checks without uploading]' - ;; - - read-manifest) - _arguments -s -S $common $manifest - ;; - - run | r) - _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ - '--example=[name of the bin target]:name:_cargo_example_names' \ - '--bin=[name of the bin target]:name' \ - '(-p --package)'{-p+,--package=}'[specify package with the target to run]:package:_cargo_package_names' \ - '--release[build in release mode]' \ - '*: :_default' - ;; - - rustc) - _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ - '(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \ - '--profile=[specify profile to build the selected target for]:profile' \ - '--release[build artifacts in release mode, with optimizations]' \ - "${command_scope_spec[@]}" \ - '*: : _dispatch rustc rustc -default-' - ;; - - rustdoc) - _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ - '--document-private-items[include non-public items in the documentation]' \ - '--open[open the docs in a browser after the operation]' \ - '(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \ - '--release[build artifacts in release mode, with optimizations]' \ - "${command_scope_spec[@]}" \ - '*: : _dispatch rustdoc rustdoc -default-' - ;; - - search) - _arguments -s -S $common $registry \ - '--index=[specify registry index]:index' \ - '--limit=[limit the number of results]:results [10]' \ - '*: :_guard "^-*" "query"' - ;; - - test | t) - _arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \ - '--test=[test name]: :_cargo_test_names' \ - '--no-fail-fast[run all tests regardless of failure]' \ - '--no-run[compile but do not run]' \ - '(-p --package)'{-p+,--package=}'[package to run tests for]:package:_cargo_package_names' \ - '--all[test all packages in the workspace]' \ - '--release[build artifacts in release mode, with optimizations]' \ - '1: :_cargo_test_names' \ - '(--doc --bin --example --test --bench)--lib[only test library]' \ - '(--lib --bin --example --test --bench)--doc[only test documentation]' \ - '(--lib --doc --example --test --bench)--bin=[binary name]' \ - '(--lib --doc --bin --test --bench)--example=[example name]:_cargo_example_names' \ - '(--lib --doc --bin --example --bench)--test=[test name]' \ - '(--lib --doc --bin --example 
--test)--bench=[benchmark name]' \ - '*: :_default' - ;; - - tree) - _arguments -s -S $common $features $triple $manifest \ - '(-p --package)'{-p+,--package=}'[package to use as the root]:package:_cargo_package_names' \ - '(-i --invert)'{-i+,--invert=}'[invert the tree for the given package]:package:_cargo_package_names' \ - '--prefix=[line prefix]:prefix:(depth indent none)' \ - '--no-dedupe[repeat shared dependencies]' \ - '(-d --duplicates)'{-d,--duplicates}'[packages with multiple versions]' \ - '--charset=[utf8 or ascii]:charset:(utf8 ascii)' \ - '(-f --format)'{-f,--format=}'[format string]:format' \ - '(-e --edges)'{-e,--edges=}'[edge kinds]:kind:(features normal build dev all no-dev no-build no-normal)' \ - ;; - - uninstall) - _arguments -s -S $common \ - '(-p --package)'{-p+,--package=}'[specify package to uninstall]:package:_cargo_package_names' \ - '--bin=[only uninstall the specified binary]:name' \ - '--root=[directory to uninstall packages from]: :_files -/' \ - '*:crate:_cargo_installed_crates -F line' - ;; - - update) - _arguments -s -S $common $manifest \ - '--aggressive=[force dependency update]' \ - "--dry-run[don't actually write the lockfile]" \ - '(-p --package)'{-p+,--package=}'[specify package to update]:package:_cargo_package_names' \ - '--precise=[update single dependency to precise release]:release' - ;; - - verify-project) - _arguments -s -S $common $manifest - ;; - - version) - _arguments -s -S $common - ;; - - yank) - _arguments -s -S $common $registry \ - '--vers=[specify yank version]:version' \ - '--undo[undo a yank, putting a version back into the index]' \ - '--index=[specify registry index to yank from]:registry index' \ - '--token=[specify API token to use when authenticating]:token' \ - '*: :_guard "^-*" "crate"' - ;; - *) - # allow plugins to define their own functions - if ! _call_function ret _cargo-${words[1]}; then - # fallback on default completion for unknown commands - _default && ret=0 - fi - (( ! ret )) - ;; - esac - ;; - esac -} - -_cargo_unstable_flags() { - local flags - flags=( help ${${${(M)${(f)"$(_call_program flags cargo -Z help)"}:#*--*}/ #-- #/:}##*-Z } ) - _describe -t flags 'unstable flag' flags -} - -_cargo_installed_crates() { - local expl - _description crates expl 'crate' - compadd "$@" "$expl[@]" - ${${${(f)"$(cargo install --list)"}:# *}%% *} -} - -_cargo_cmds() { - local -a commands - # This uses Parameter Expansion Flags, which are a built-in Zsh feature. - # See more: http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags - # and http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion - # - # # How this work? - # - # First it splits the result of `cargo --list` at newline, then it removes the first line. - # Then it removes indentation (4 whitespaces) before each items. (Note the x## pattern [1]). 
- # Then it replaces those spaces between item and description with a `:` - # - # [1]: https://github.com/zsh-users/zsh-completions/blob/master/zsh-completions-howto.org#patterns - commands=( ${${${(M)"${(f)$(_call_program commands cargo --list)}":# *}/ ##/}/ ##/:} ) - _describe -t commands 'command' commands -} - -_cargo_target_triple() { - local -a targets - targets=( ${(f)"$(rustc --print target-list)"} ) - _describe 'target triple' targets -} - -#FIXME: Disabled until fixed -#gets package names from the manifest file -_cargo_package_names() { - _message -e packages package -} - -# Extracts the values of "name" from the array given in $1 and shows them as -# command line options for completion -_cargo_names_from_array() { - local manifest=$(cargo locate-project --message-format plain) - if [[ -z $manifest ]]; then - return 0 - fi - - local last_line - local -a names; - local in_block=false - local block_name=$1 - names=() - while read -r line; do - if [[ $last_line == "[[$block_name]]" ]]; then - in_block=true - else - if [[ $last_line =~ '\s*\[\[.*' ]]; then - in_block=false - fi - fi - - if [[ $in_block == true ]]; then - if [[ $line =~ '\s*name\s*=' ]]; then - regexp-replace line '^\s*name\s*=\s*|"' '' - names+=( "$line" ) - fi - fi - - last_line=$line - done < "$manifest" - _describe "$block_name" names - -} - -#Gets the test names from the manifest file -_cargo_test_names() { - _cargo_names_from_array "test" -} - -#Gets the bench names from the manifest file -_cargo_benchmark_names() { - _cargo_names_from_array "bench" -} - -_cargo_example_names() { - if [[ -d examples ]]; then - local -a files=(${(@f)$(echo examples/*.rs(:t:r))}) - _values 'example' "${files[@]}" - fi -} - -_cargo diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/cargo.bashcomp.sh b/collector/compile-benchmarks/cargo-0.60.0/src/etc/cargo.bashcomp.sh deleted file mode 100644 index c61f3eed2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/cargo.bashcomp.sh +++ /dev/null @@ -1,270 +0,0 @@ -# Required for bash versions < 4.1 -# Default bash version is 3.2 on latest macOS. See #6874 -shopt -s extglob - -command -v cargo >/dev/null 2>&1 && -_cargo() -{ - local cur prev words cword - _get_comp_words_by_ref cur prev words cword - - COMPREPLY=() - - # Skip past - and + options to find the command. - local nwords=${#words[@]} - local cmd_i cmd dd_i - for (( cmd_i=1; cmd_i<$nwords; cmd_i++ )); - do - if [[ ! "${words[$cmd_i]}" =~ ^[+-] ]]; then - cmd="${words[$cmd_i]}" - break - fi - done - # Find the location of the -- separator. 
- for (( dd_i=1; dd_i<$nwords-1; dd_i++ )); - do - if [[ "${words[$dd_i]}" = "--" ]]; then - break - fi - done - - local vcs='git hg none pijul fossil' - local color='auto always never' - local msg_format='human json short' - - local opt_help='-h --help' - local opt_verbose='-v --verbose' - local opt_quiet='-q --quiet' - local opt_color='--color' - local opt_common="$opt_help $opt_verbose $opt_quiet $opt_color" - local opt_pkg_spec='-p --package --all --exclude --workspace' - local opt_pkg='-p --package' - local opt_feat='--features --all-features --no-default-features' - local opt_mani='--manifest-path' - local opt_jobs='-j --jobs' - local opt_force='-f --force' - local opt_sync='-s --sync' - local opt_lock='--frozen --locked --offline' - local opt_targets="--lib --bin --bins --example --examples --test --tests --bench --benches --all-targets" - - local opt___nocmd="$opt_common -V --version --list --explain" - local opt__bench="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs $opt_targets --message-format --target --no-run --no-fail-fast --target-dir" - local opt__build="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs $opt_targets --message-format --target --release --profile --target-dir" - local opt__b="$opt__build" - local opt__check="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs $opt_targets --message-format --target --release --profile --target-dir" - local opt__c="$opt__check" - local opt__clean="$opt_common $opt_pkg $opt_mani $opt_lock --target --release --doc --target-dir --profile" - local opt__doc="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs --message-format --bin --bins --lib --target --open --no-deps --release --document-private-items --target-dir --profile" - local opt__d="$opt__doc" - local opt__fetch="$opt_common $opt_mani $opt_lock --target" - local opt__fix="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_jobs $opt_targets $opt_lock --release --target --message-format --broken-code --edition --edition-idioms --allow-no-vcs --allow-dirty --allow-staged --profile --target-dir" - local opt__generate_lockfile="$opt_common $opt_mani $opt_lock" - local opt__help="$opt_help" - local opt__init="$opt_common $opt_lock --bin --lib --name --vcs --edition --registry" - local opt__install="$opt_common $opt_feat $opt_jobs $opt_lock $opt_force --bin --bins --branch --debug --example --examples --git --list --path --rev --root --tag --version --registry --target --profile --no-track" - local opt__locate_project="$opt_common $opt_mani $opt_lock --message-format --workspace" - local opt__login="$opt_common $opt_lock --registry" - local opt__metadata="$opt_common $opt_feat $opt_mani $opt_lock --format-version=1 --no-deps --filter-platform" - local opt__new="$opt_common $opt_lock --vcs --bin --lib --name --edition --registry" - local opt__owner="$opt_common $opt_lock -a --add -r --remove -l --list --index --token --registry" - local opt__package="$opt_common $opt_mani $opt_feat $opt_lock $opt_jobs --allow-dirty -l --list --no-verify --no-metadata --target --target-dir" - local opt__pkgid="$opt_common $opt_mani $opt_lock $opt_pkg" - local opt__publish="$opt_common $opt_mani $opt_feat $opt_lock $opt_jobs --allow-dirty --dry-run --token --no-verify --index --registry --target --target-dir" - local opt__read_manifest="$opt_help $opt_quiet $opt_verbose $opt_mani $opt_color $opt_lock --no-deps" - local opt__run="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs --message-format --target --bin --example 
--release --target-dir --profile" - local opt__r="$opt__run" - local opt__rustc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_targets -L --crate-type --extern --message-format --profile --target --release --target-dir" - local opt__rustdoc="$opt_common $opt_pkg $opt_feat $opt_mani $opt_lock $opt_jobs $opt_targets --message-format --target --release --open --target-dir --profile" - local opt__search="$opt_common $opt_lock --limit --index --registry" - local opt__test="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock $opt_jobs $opt_targets --message-format --doc --target --no-run --release --no-fail-fast --target-dir --profile" - local opt__t="$opt__test" - local opt__tree="$opt_common $opt_pkg_spec $opt_feat $opt_mani $opt_lock --target -i --invert --prefix --no-dedupe --duplicates -d --charset -f --format -e --edges" - local opt__uninstall="$opt_common $opt_lock $opt_pkg --bin --root" - local opt__update="$opt_common $opt_mani $opt_lock $opt_pkg --aggressive --precise --dry-run" - local opt__vendor="$opt_common $opt_mani $opt_lock $opt_sync --no-delete --respect-source-config --versioned-dirs" - local opt__verify_project="$opt_common $opt_mani $opt_lock" - local opt__version="$opt_common $opt_lock" - local opt__yank="$opt_common $opt_lock --vers --undo --index --token --registry" - local opt__libtest="--help --include-ignored --ignored --test --bench --list --logfile --nocapture --test-threads --skip -q --quiet --exact --color --format" - - if [[ $cword -gt $dd_i ]]; then - # Completion after -- separator. - if [[ "${cmd}" = @(test|bench) ]]; then - COMPREPLY=( $( compgen -W "${opt__libtest}" -- "$cur" ) ) - else - # Fallback to filename completion, useful with `cargo run`. - _filedir - fi - elif [[ $cword -le $cmd_i ]]; then - # Completion before or at the command. - if [[ "$cur" == -* ]]; then - COMPREPLY=( $( compgen -W "${opt___nocmd}" -- "$cur" ) ) - elif [[ "$cur" == +* ]]; then - COMPREPLY=( $( compgen -W "$(_toolchains)" -- "$cur" ) ) - else - COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) ) - fi - else - case "${prev}" in - --vcs) - COMPREPLY=( $( compgen -W "$vcs" -- "$cur" ) ) - ;; - --color) - COMPREPLY=( $( compgen -W "$color" -- "$cur" ) ) - ;; - --message-format) - COMPREPLY=( $( compgen -W "$msg_format" -- "$cur" ) ) - ;; - --manifest-path) - _filedir toml - ;; - --bin) - COMPREPLY=( $( compgen -W "$(_bin_names)" -- "$cur" ) ) - ;; - --test) - COMPREPLY=( $( compgen -W "$(_test_names)" -- "$cur" ) ) - ;; - --bench) - COMPREPLY=( $( compgen -W "$(_benchmark_names)" -- "$cur" ) ) - ;; - --example) - COMPREPLY=( $( compgen -W "$(_get_examples)" -- "$cur" ) ) - ;; - --target) - COMPREPLY=( $( compgen -W "$(_get_targets)" -- "$cur" ) ) - ;; - --target-dir) - _filedir -d - ;; - help) - COMPREPLY=( $( compgen -W "$__cargo_commands" -- "$cur" ) ) - ;; - *) - local opt_var=opt__${cmd//-/_} - if [[ -z "${!opt_var}" ]]; then - # Fallback to filename completion. 
- _filedir - else - COMPREPLY=( $( compgen -W "${!opt_var}" -- "$cur" ) ) - fi - ;; - esac - fi - - # compopt does not work in bash version 3 - - return 0 -} && -complete -F _cargo cargo - -__cargo_commands=$(cargo --list 2>/dev/null | awk 'NR>1 {print $1}') - -_locate_manifest(){ - cargo locate-project --message-format plain 2>/dev/null -} - -# Extracts the values of "name" from the array given in $1 and shows them as -# command line options for completion -_get_names_from_array() -{ - local manifest=$(_locate_manifest) - if [[ -z $manifest ]]; then - return 0 - fi - - local last_line - local -a names - local in_block=false - local block_name=$1 - while read line - do - if [[ $last_line == "[[$block_name]]" ]]; then - in_block=true - else - if [[ $last_line =~ .*\[\[.* ]]; then - in_block=false - fi - fi - - if [[ $in_block == true ]]; then - if [[ $line =~ .*name.*\= ]]; then - line=${line##*=} - line=${line%%\"} - line=${line##*\"} - names+=($line) - fi - fi - - last_line=$line - done < $manifest - echo "${names[@]}" -} - -#Gets the bin names from the manifest file -_bin_names() -{ - _get_names_from_array "bin" -} - -#Gets the test names from the manifest file -_test_names() -{ - _get_names_from_array "test" -} - -#Gets the bench names from the manifest file -_benchmark_names() -{ - _get_names_from_array "bench" -} - -_get_examples(){ - local manifest=$(_locate_manifest) - [ -z "$manifest" ] && return 0 - - local files=("${manifest%/*}"/examples/*.rs) - local names=("${files[@]##*/}") - local names=("${names[@]%.*}") - # "*" means no examples found - if [[ "${names[@]}" != "*" ]]; then - echo "${names[@]}" - fi -} - -_get_targets(){ - local result=() - local targets=$(rustup target list) - while read line - do - if [[ "$line" =~ default|installed ]]; then - result+=("${line%% *}") - fi - done <<< "$targets" - echo "${result[@]}" -} - -_toolchains(){ - local result=() - local toolchains=$(rustup toolchain list) - local channels="nightly|beta|stable|[0-9]\.[0-9]{1,2}\.[0-9]" - local date="[0-9]{4}-[0-9]{2}-[0-9]{2}" - while read line - do - # Strip " (default)" - line=${line%% *} - if [[ "$line" =~ ^($channels)(-($date))?(-.*) ]]; then - if [[ -z ${BASH_REMATCH[3]} ]]; then - result+=("+${BASH_REMATCH[1]}") - else - # channel-date - result+=("+${BASH_REMATCH[1]}-${BASH_REMATCH[3]}") - fi - result+=("+$line") - else - result+=("+$line") - fi - done <<< "$toolchains" - echo "${result[@]}" -} - -# vim:ft=sh diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-bench.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-bench.1 deleted file mode 100644 index 105cef5a6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-bench.1 +++ /dev/null @@ -1,471 +0,0 @@ -'\" t -.TH "CARGO\-BENCH" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-bench \- Execute benchmarks of a package -.SH "SYNOPSIS" -\fBcargo bench\fR [\fIoptions\fR] [\fIbenchname\fR] [\fB\-\-\fR \fIbench\-options\fR] -.SH "DESCRIPTION" -Compile and execute benchmarks. -.sp -The benchmark filtering argument \fIbenchname\fR and all the arguments following -the two dashes (\fB\-\-\fR) are passed to the benchmark binaries and thus to -\fIlibtest\fR (rustc's built in unit\-test and micro\-benchmarking framework). If -you are passing arguments to both Cargo and the binary, the ones after \fB\-\-\fR go -to the binary, the ones before go to Cargo. 
For details about libtest's -arguments see the output of \fBcargo bench \-\- \-\-help\fR and check out the rustc -book's chapter on how tests work at -\&. -.sp -As an example, this will run only the benchmark named \fBfoo\fR (and skip other -similarly named benchmarks like \fBfoobar\fR): -.sp -.RS 4 -.nf -cargo bench \-\- foo \-\-exact -.fi -.RE -.sp -Benchmarks are built with the \fB\-\-test\fR option to \fBrustc\fR which creates an -executable with a \fBmain\fR function that automatically runs all functions -annotated with the \fB#[bench]\fR attribute. Cargo passes the \fB\-\-bench\fR flag to -the test harness to tell it to run only benchmarks. -.sp -The libtest harness may be disabled by setting \fBharness = false\fR in the target -manifest settings, in which case your code will need to provide its own \fBmain\fR -function to handle running benchmarks. -.RS 3 -.ll -5 -.sp -\fBNote\fR: The -\fI\f(BI#[bench]\fI attribute\fR -is currently unstable and only available on the -\fInightly channel\fR \&. -There are some packages available on -\fIcrates.io\fR that may help with -running benchmarks on the stable channel, such as -\fICriterion\fR \&. -.br -.RE -.ll -.sp -By default, \fBcargo bench\fR uses the \fI\f(BIbench\fI profile\fR , which enables -optimizations and disables debugging information. If you need to debug a -benchmark, you can use the \fB\-\-profile=dev\fR command\-line option to switch to -the dev profile. You can then run the debug\-enabled benchmark within a -debugger. -.SH "OPTIONS" -.SS "Benchmark Options" -.sp -\fB\-\-no\-run\fR -.RS 4 -Compile, but don't run benchmarks. -.RE -.sp -\fB\-\-no\-fail\-fast\fR -.RS 4 -Run all benchmarks regardless of failure. Without this flag, Cargo will exit -after the first executable fails. The Rust test harness will run all benchmarks -within the executable to completion, this flag only applies to the executable -as a whole. -.RE -.SS "Package Selection" -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. -.sp -The default members of a workspace can be set explicitly with the -\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Benchmark only the specified packages. See \fBcargo\-pkgid\fR(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern. -.RE -.sp -\fB\-\-workspace\fR -.RS 4 -Benchmark all members in the workspace. -.RE -.sp -\fB\-\-all\fR -.RS 4 -Deprecated alias for \fB\-\-workspace\fR\&. -.RE -.sp -\fB\-\-exclude\fR \fISPEC\fR\&... -.RS 4 -Exclude the specified packages. Must be used in conjunction with the -\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports -common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. 
However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern. -.RE -.SS "Target Selection" -When no target selection options are given, \fBcargo bench\fR will build the -following targets of the selected packages: -.sp -.RS 4 -\h'-04'\(bu\h'+02'lib \[em] used to link with binaries and benchmarks -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'bins (only if benchmark targets are built and required features are -available) -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'lib as a benchmark -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'bins as benchmarks -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'benchmark targets -.RE -.sp -The default behavior can be changed by setting the \fBbench\fR flag for the target -in the manifest settings. Setting examples to \fBbench = true\fR will build and -run the example as a benchmark. Setting targets to \fBbench = false\fR will stop -them from being benchmarked by default. Target selection options that take a -target by name ignore the \fBbench\fR flag and will always benchmark the given -target. -.sp -Passing target selection flags will benchmark only the specified -targets. -.sp -Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also -support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. -.sp -\fB\-\-lib\fR -.RS 4 -Benchmark the package's library. -.RE -.sp -\fB\-\-bin\fR \fIname\fR\&... -.RS 4 -Benchmark the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-bins\fR -.RS 4 -Benchmark all binary targets. -.RE -.sp -\fB\-\-example\fR \fIname\fR\&... -.RS 4 -Benchmark the specified example. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-examples\fR -.RS 4 -Benchmark all example targets. -.RE -.sp -\fB\-\-test\fR \fIname\fR\&... -.RS 4 -Benchmark the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-tests\fR -.RS 4 -Benchmark all targets in test mode that have the \fBtest = true\fR manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the \fBtest\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-bench\fR \fIname\fR\&... -.RS 4 -Benchmark the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-benches\fR -.RS 4 -Benchmark all targets in benchmark mode that have the \fBbench = true\fR -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the \fBbench\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-all\-targets\fR -.RS 4 -Benchmark all targets. 
This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Benchmark for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Benchmark with the given profile. -See the \fIthe reference\fR for more details on profiles. -.RE -.sp -\fB\-\-ignore\-rust\-version\fR -.RS 4 -Benchmark the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's \fBrust\-version\fR field. -.RE -.SS "Output Options" -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.SS "Display Options" -By default the Rust test harness hides output from benchmark execution to keep -results readable. Benchmark output can be recovered (e.g., for debugging) by -passing \fB\-\-nocapture\fR to the benchmark binaries: -.sp -.RS 4 -.nf -cargo bench \-\- \-\-nocapture -.fi -.RE -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The output format for diagnostic messages. Can be specified multiple times -and consists of comma\-separated values. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. 
Conflicts with -\fBshort\fR and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR -and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See -\fIthe reference\fR -for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains -the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SS "Miscellaneous Options" -The \fB\-\-jobs\fR argument affects the building of the benchmark executable but -does not affect how many threads are used when running the benchmarks. The -Rust test harness runs benchmarks serially in a single thread. -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. 
May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Build and execute all the benchmarks of the current package: -.sp -.RS 4 -.nf -cargo bench -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Run only a specific benchmark within a specific benchmark target: -.sp -.RS 4 -.nf -cargo bench \-\-bench bench_name \-\- modname::some_benchmark -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-test\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-build.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-build.1 deleted file mode 100644 index 6a82bdd4b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-build.1 +++ /dev/null @@ -1,407 +0,0 @@ -'\" t -.TH "CARGO\-BUILD" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-build \- Compile the current package -.SH "SYNOPSIS" -\fBcargo build\fR [\fIoptions\fR] -.SH "DESCRIPTION" -Compile local packages and all of their dependencies. -.SH "OPTIONS" -.SS "Package Selection" -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. -.sp -The default members of a workspace can be set explicitly with the -\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Build only the specified packages. See \fBcargo\-pkgid\fR(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern. -.RE -.sp -\fB\-\-workspace\fR -.RS 4 -Build all members in the workspace. -.RE -.sp -\fB\-\-all\fR -.RS 4 -Deprecated alias for \fB\-\-workspace\fR\&. -.RE -.sp -\fB\-\-exclude\fR \fISPEC\fR\&... -.RS 4 -Exclude the specified packages. Must be used in conjunction with the -\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports -common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern. -.RE -.SS "Target Selection" -When no target selection options are given, \fBcargo build\fR will build all -binary and library targets of the selected packages. Binaries are skipped if -they have \fBrequired\-features\fR that are missing. -.sp -Passing target selection flags will build only the specified -targets. -.sp -Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also -support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. 
However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. -.sp -\fB\-\-lib\fR -.RS 4 -Build the package's library. -.RE -.sp -\fB\-\-bin\fR \fIname\fR\&... -.RS 4 -Build the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-bins\fR -.RS 4 -Build all binary targets. -.RE -.sp -\fB\-\-example\fR \fIname\fR\&... -.RS 4 -Build the specified example. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-examples\fR -.RS 4 -Build all example targets. -.RE -.sp -\fB\-\-test\fR \fIname\fR\&... -.RS 4 -Build the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-tests\fR -.RS 4 -Build all targets in test mode that have the \fBtest = true\fR manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the \fBtest\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-bench\fR \fIname\fR\&... -.RS 4 -Build the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-benches\fR -.RS 4 -Build all targets in benchmark mode that have the \fBbench = true\fR -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the \fBbench\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-all\-targets\fR -.RS 4 -Build all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Build for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. 
-.RE -.sp -\fB\-r\fR, -\fB\-\-release\fR -.RS 4 -Build optimized artifacts with the \fBrelease\fR profile. -See also the \fB\-\-profile\fR option for choosing a specific profile by name. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Build with the given profile. -See the \fIthe reference\fR for more details on profiles. -.RE -.sp -\fB\-\-ignore\-rust\-version\fR -.RS 4 -Build the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's \fBrust\-version\fR field. -.RE -.SS "Output Options" -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.sp -\fB\-\-out\-dir\fR \fIdirectory\fR -.RS 4 -Copy final artifacts to this directory. -.sp -This option is unstable and available only on the -\fInightly channel\fR -and requires the \fB\-Z unstable\-options\fR flag to enable. -See for more information. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The output format for diagnostic messages. Can be specified multiple times -and consists of comma\-separated values. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with -\fBshort\fR and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR -and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See -\fIthe reference\fR -for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains -the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. 
-.RE -.RE -.sp -\fB\-\-build\-plan\fR -.RS 4 -Outputs a series of JSON messages to stdout that indicate the commands to run -the build. -.sp -This option is unstable and available only on the -\fInightly channel\fR -and requires the \fB\-Z unstable\-options\fR flag to enable. -See for more information. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SS "Miscellaneous Options" -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.sp -\fB\-\-future\-incompat\-report\fR -.RS 4 -Displays a future\-incompat report for any future\-incompatible warnings -produced during execution of this command -.sp -See \fBcargo\-report\fR(1) -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
-.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Build the local package and all of its dependencies: -.sp -.RS 4 -.nf -cargo build -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Build with optimizations: -.sp -.RS 4 -.nf -cargo build \-\-release -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-rustc\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-check.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-check.1 deleted file mode 100644 index d576f2bbb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-check.1 +++ /dev/null @@ -1,397 +0,0 @@ -'\" t -.TH "CARGO\-CHECK" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-check \- Check the current package -.SH "SYNOPSIS" -\fBcargo check\fR [\fIoptions\fR] -.SH "DESCRIPTION" -Check a local package and all of its dependencies for errors. This will -essentially compile the packages without performing the final step of code -generation, which is faster than running \fBcargo build\fR\&. The compiler will save -metadata files to disk so that future runs will reuse them if the source has -not been modified. Some diagnostics and errors are only emitted during code -generation, so they inherently won't be reported with \fBcargo check\fR\&. -.SH "OPTIONS" -.SS "Package Selection" -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. -.sp -The default members of a workspace can be set explicitly with the -\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Check only the specified packages. See \fBcargo\-pkgid\fR(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern. -.RE -.sp -\fB\-\-workspace\fR -.RS 4 -Check all members in the workspace. -.RE -.sp -\fB\-\-all\fR -.RS 4 -Deprecated alias for \fB\-\-workspace\fR\&. -.RE -.sp -\fB\-\-exclude\fR \fISPEC\fR\&... -.RS 4 -Exclude the specified packages. Must be used in conjunction with the -\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports -common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern. -.RE -.SS "Target Selection" -When no target selection options are given, \fBcargo check\fR will check all -binary and library targets of the selected packages. Binaries are skipped if -they have \fBrequired\-features\fR that are missing. -.sp -Passing target selection flags will check only the specified -targets. -.sp -Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also -support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. 
However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. -.sp -\fB\-\-lib\fR -.RS 4 -Check the package's library. -.RE -.sp -\fB\-\-bin\fR \fIname\fR\&... -.RS 4 -Check the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-bins\fR -.RS 4 -Check all binary targets. -.RE -.sp -\fB\-\-example\fR \fIname\fR\&... -.RS 4 -Check the specified example. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-examples\fR -.RS 4 -Check all example targets. -.RE -.sp -\fB\-\-test\fR \fIname\fR\&... -.RS 4 -Check the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-tests\fR -.RS 4 -Check all targets in test mode that have the \fBtest = true\fR manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the \fBtest\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-bench\fR \fIname\fR\&... -.RS 4 -Check the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-benches\fR -.RS 4 -Check all targets in benchmark mode that have the \fBbench = true\fR -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the \fBbench\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-all\-targets\fR -.RS 4 -Check all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Check for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. 
-.RE -.sp -\fB\-r\fR, -\fB\-\-release\fR -.RS 4 -Check optimized artifacts with the \fBrelease\fR profile. -See also the \fB\-\-profile\fR option for choosing a specific profile by name. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Check with the given profile. -.sp -As a special case, specifying the \fBtest\fR profile will also enable checking in -test mode which will enable checking tests and enable the \fBtest\fR cfg option. -See \fIrustc tests\fR for more -detail. -.sp -See the \fIthe reference\fR for more details on profiles. -.RE -.sp -\fB\-\-ignore\-rust\-version\fR -.RS 4 -Check the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's \fBrust\-version\fR field. -.RE -.SS "Output Options" -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The output format for diagnostic messages. Can be specified multiple times -and consists of comma\-separated values. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with -\fBshort\fR and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR -and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See -\fIthe reference\fR -for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains -the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. 
By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SS "Miscellaneous Options" -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.sp -\fB\-\-future\-incompat\-report\fR -.RS 4 -Displays a future\-incompat report for any future\-incompatible warnings -produced during execution of this command -.sp -See \fBcargo\-report\fR(1) -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Check the local package for errors: -.sp -.RS 4 -.nf -cargo check -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Check all targets, including unit tests: -.sp -.RS 4 -.nf -cargo check \-\-all\-targets \-\-profile=test -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-build\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-clean.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-clean.1 deleted file mode 100644 index 20b9fa1f1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-clean.1 +++ /dev/null @@ -1,196 +0,0 @@ -'\" t -.TH "CARGO\-CLEAN" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-clean \- Remove generated artifacts -.SH "SYNOPSIS" -\fBcargo clean\fR [\fIoptions\fR] -.SH "DESCRIPTION" -Remove artifacts from the target directory that Cargo has generated in the -past. -.sp -With no options, \fBcargo clean\fR will delete the entire target directory. 
-.SH "OPTIONS" -.SS "Package Selection" -When no packages are selected, all packages and all dependencies in the -workspace are cleaned. -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Clean only the specified packages. This flag may be specified -multiple times. See \fBcargo\-pkgid\fR(1) for the SPEC format. -.RE -.SS "Clean Options" -.sp -\fB\-\-doc\fR -.RS 4 -This option will cause \fBcargo clean\fR to remove only the \fBdoc\fR directory in -the target directory. -.RE -.sp -\fB\-\-release\fR -.RS 4 -Remove all artifacts in the \fBrelease\fR directory. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Remove all artifacts in the directory with the given profile name. -.RE -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Clean for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. 
Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Remove the entire target directory: -.sp -.RS 4 -.nf -cargo clean -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Remove only the release artifacts: -.sp -.RS 4 -.nf -cargo clean \-\-release -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-build\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-doc.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-doc.1 deleted file mode 100644 index 3f718c367..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-doc.1 +++ /dev/null @@ -1,347 +0,0 @@ -'\" t -.TH "CARGO\-DOC" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-doc \- Build a package's documentation -.SH "SYNOPSIS" -\fBcargo doc\fR [\fIoptions\fR] -.SH "DESCRIPTION" -Build the documentation for the local package and all dependencies. The output -is placed in \fBtarget/doc\fR in rustdoc's usual format. -.SH "OPTIONS" -.SS "Documentation Options" -.sp -\fB\-\-open\fR -.RS 4 -Open the docs in a browser after building them. This will use your default -browser unless you define another one in the \fBBROWSER\fR environment variable -or use the \fI\f(BIdoc.browser\fI\fR configuration -option. -.RE -.sp -\fB\-\-no\-deps\fR -.RS 4 -Do not build documentation for dependencies. -.RE -.sp -\fB\-\-document\-private\-items\fR -.RS 4 -Include non\-public items in the documentation. This will be enabled by default if documenting a binary target. -.RE -.SS "Package Selection" -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. -.sp -The default members of a workspace can be set explicitly with the -\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Document only the specified packages. See \fBcargo\-pkgid\fR(1) for the -SPEC format. 
This flag may be specified multiple times and supports common Unix -glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern. -.RE -.sp -\fB\-\-workspace\fR -.RS 4 -Document all members in the workspace. -.RE -.sp -\fB\-\-all\fR -.RS 4 -Deprecated alias for \fB\-\-workspace\fR\&. -.RE -.sp -\fB\-\-exclude\fR \fISPEC\fR\&... -.RS 4 -Exclude the specified packages. Must be used in conjunction with the -\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports -common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern. -.RE -.SS "Target Selection" -When no target selection options are given, \fBcargo doc\fR will document all -binary and library targets of the selected package. The binary will be skipped -if its name is the same as the lib target. Binaries are skipped if they have -\fBrequired\-features\fR that are missing. -.sp -The default behavior can be changed by setting \fBdoc = false\fR for the target in -the manifest settings. Using target selection options will ignore the \fBdoc\fR -flag and will always document the given target. -.sp -\fB\-\-lib\fR -.RS 4 -Document the package's library. -.RE -.sp -\fB\-\-bin\fR \fIname\fR\&... -.RS 4 -Document the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-bins\fR -.RS 4 -Document all binary targets. -.RE -.sp -\fB\-\-example\fR \fIname\fR\&... -.RS 4 -Document the specified example. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-examples\fR -.RS 4 -Document all example targets. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Document for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.sp -\fB\-r\fR, -\fB\-\-release\fR -.RS 4 -Document optimized artifacts with the \fBrelease\fR profile. -See also the \fB\-\-profile\fR option for choosing a specific profile by name. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Document with the given profile. 
-See the \fIthe reference\fR for more details on profiles. -.RE -.sp -\fB\-\-ignore\-rust\-version\fR -.RS 4 -Document the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's \fBrust\-version\fR field. -.RE -.SS "Output Options" -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The output format for diagnostic messages. Can be specified multiple times -and consists of comma\-separated values. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with -\fBshort\fR and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR -and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See -\fIthe reference\fR -for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains -the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. 
-.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SS "Miscellaneous Options" -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Build the local package documentation and its dependencies and output to -\fBtarget/doc\fR\&. -.sp -.RS 4 -.nf -cargo doc -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-rustdoc\fR(1), \fBrustdoc\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-fetch.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-fetch.1 deleted file mode 100644 index ef5d989dc..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-fetch.1 +++ /dev/null @@ -1,160 +0,0 @@ -'\" t -.TH "CARGO\-FETCH" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-fetch \- Fetch dependencies of a package from the network -.SH "SYNOPSIS" -\fBcargo fetch\fR [\fIoptions\fR] -.SH "DESCRIPTION" -If a \fBCargo.lock\fR file is available, this command will ensure that all of the -git dependencies and/or registry dependencies are downloaded and locally -available. Subsequent Cargo commands never touch the network after a \fBcargo fetch\fR unless the lock file changes. -.sp -If the lock file is not available, then this command will generate the lock -file before fetching the dependencies. -.sp -If \fB\-\-target\fR is not specified, then all target dependencies are fetched. -.sp -See also the \fIcargo\-prefetch\fR -plugin which adds a command to download popular crates. This may be useful if -you plan to use Cargo without a network with the \fB\-\-offline\fR flag. -.SH "OPTIONS" -.SS "Fetch options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Fetch for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. 
Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
-.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Fetch all dependencies: -.sp -.RS 4 -.nf -cargo fetch -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-update\fR(1), \fBcargo\-generate\-lockfile\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-fix.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-fix.1 deleted file mode 100644 index d50e04a8d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-fix.1 +++ /dev/null @@ -1,494 +0,0 @@ -'\" t -.TH "CARGO\-FIX" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-fix \- Automatically fix lint warnings reported by rustc -.SH "SYNOPSIS" -\fBcargo fix\fR [\fIoptions\fR] -.SH "DESCRIPTION" -This Cargo subcommand will automatically take rustc's suggestions from -diagnostics like warnings and apply them to your source code. This is intended -to help automate tasks that rustc itself already knows how to tell you to fix! -.sp -Executing \fBcargo fix\fR will under the hood execute \fBcargo\-check\fR(1). Any warnings -applicable to your crate will be automatically fixed (if possible) and all -remaining warnings will be displayed when the check process is finished. For -example if you'd like to apply all fixes to the current package, you can run: -.sp -.RS 4 -.nf -cargo fix -.fi -.RE -.sp -which behaves the same as \fBcargo check \-\-all\-targets\fR\&. -.sp -\fBcargo fix\fR is only capable of fixing code that is normally compiled with -\fBcargo check\fR\&. If code is conditionally enabled with optional features, you -will need to enable those features for that code to be analyzed: -.sp -.RS 4 -.nf -cargo fix \-\-features foo -.fi -.RE -.sp -Similarly, other \fBcfg\fR expressions like platform\-specific code will need to -pass \fB\-\-target\fR to fix code for the given target. -.sp -.RS 4 -.nf -cargo fix \-\-target x86_64\-pc\-windows\-gnu -.fi -.RE -.sp -If you encounter any problems with \fBcargo fix\fR or otherwise have any questions -or feature requests please don't hesitate to file an issue at -\&. -.SS "Edition migration" -The \fBcargo fix\fR subcommand can also be used to migrate a package from one -\fIedition\fR to the next. The general procedure is: -.sp -.RS 4 -\h'-04' 1.\h'+01'Run \fBcargo fix \-\-edition\fR\&. Consider also using the \fB\-\-all\-features\fR flag if -your project has multiple features. You may also want to run \fBcargo fix \-\-edition\fR multiple times with different \fB\-\-target\fR flags if your project -has platform\-specific code gated by \fBcfg\fR attributes. -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Modify \fBCargo.toml\fR to set the \fIedition field\fR to the new edition. -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Run your project tests to verify that everything still works. If new -warnings are issued, you may want to consider running \fBcargo fix\fR again -(without the \fB\-\-edition\fR flag) to apply any suggestions given by the -compiler. -.RE -.sp -And hopefully that's it! Just keep in mind of the caveats mentioned above that -\fBcargo fix\fR cannot update code for inactive features or \fBcfg\fR expressions. -Also, in some rare cases the compiler is unable to automatically migrate all -code to the new edition, and this may require manual changes after building -with the new edition. -.SH "OPTIONS" -.SS "Fix options" -.sp -\fB\-\-broken\-code\fR -.RS 4 -Fix code even if it already has compiler errors. This is useful if \fBcargo fix\fR -fails to apply the changes. 
It will apply the changes and leave the broken -code in the working directory for you to inspect and manually fix. -.RE -.sp -\fB\-\-edition\fR -.RS 4 -Apply changes that will update the code to the next edition. This will not -update the edition in the \fBCargo.toml\fR manifest, which must be updated -manually after \fBcargo fix \-\-edition\fR has finished. -.RE -.sp -\fB\-\-edition\-idioms\fR -.RS 4 -Apply suggestions that will update code to the preferred style for the current -edition. -.RE -.sp -\fB\-\-allow\-no\-vcs\fR -.RS 4 -Fix code even if a VCS was not detected. -.RE -.sp -\fB\-\-allow\-dirty\fR -.RS 4 -Fix code even if the working directory has changes. -.RE -.sp -\fB\-\-allow\-staged\fR -.RS 4 -Fix code even if the working directory has staged changes. -.RE -.SS "Package Selection" -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. -.sp -The default members of a workspace can be set explicitly with the -\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Fix only the specified packages. See \fBcargo\-pkgid\fR(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern. -.RE -.sp -\fB\-\-workspace\fR -.RS 4 -Fix all members in the workspace. -.RE -.sp -\fB\-\-all\fR -.RS 4 -Deprecated alias for \fB\-\-workspace\fR\&. -.RE -.sp -\fB\-\-exclude\fR \fISPEC\fR\&... -.RS 4 -Exclude the specified packages. Must be used in conjunction with the -\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports -common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern. -.RE -.SS "Target Selection" -When no target selection options are given, \fBcargo fix\fR will fix all targets -(\fB\-\-all\-targets\fR implied). Binaries are skipped if they have -\fBrequired\-features\fR that are missing. -.sp -Passing target selection flags will fix only the specified -targets. -.sp -Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also -support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. -.sp -\fB\-\-lib\fR -.RS 4 -Fix the package's library. -.RE -.sp -\fB\-\-bin\fR \fIname\fR\&... -.RS 4 -Fix the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-bins\fR -.RS 4 -Fix all binary targets. -.RE -.sp -\fB\-\-example\fR \fIname\fR\&... -.RS 4 -Fix the specified example. 
This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-examples\fR -.RS 4 -Fix all example targets. -.RE -.sp -\fB\-\-test\fR \fIname\fR\&... -.RS 4 -Fix the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-tests\fR -.RS 4 -Fix all targets in test mode that have the \fBtest = true\fR manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the \fBtest\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-bench\fR \fIname\fR\&... -.RS 4 -Fix the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-benches\fR -.RS 4 -Fix all targets in benchmark mode that have the \fBbench = true\fR -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the \fBbench\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-all\-targets\fR -.RS 4 -Fix all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Fix for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.sp -\fB\-r\fR, -\fB\-\-release\fR -.RS 4 -Fix optimized artifacts with the \fBrelease\fR profile. -See also the \fB\-\-profile\fR option for choosing a specific profile by name. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Fix with the given profile. -.sp -As a special case, specifying the \fBtest\fR profile will also enable checking in -test mode which will enable checking tests and enable the \fBtest\fR cfg option. -See \fIrustc tests\fR for more -detail. -.sp -See the \fIthe reference\fR for more details on profiles. 
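.sp
For example, a minimal sketch of the special \fBtest\fR profile case described above, which applies fixes while checking the package in test mode:
.sp
.RS 4
.nf
cargo fix \-\-profile test
.fi
.RE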
-.RE -.sp -\fB\-\-ignore\-rust\-version\fR -.RS 4 -Fix the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's \fBrust\-version\fR field. -.RE -.SS "Output Options" -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The output format for diagnostic messages. Can be specified multiple times -and consists of comma\-separated values. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with -\fBshort\fR and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR -and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See -\fIthe reference\fR -for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains -the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. 
-.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SS "Miscellaneous Options" -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Apply compiler suggestions to the local package: -.sp -.RS 4 -.nf -cargo fix -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Update a package to prepare it for the next edition: -.sp -.RS 4 -.nf -cargo fix \-\-edition -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Apply suggested idioms for the current edition: -.sp -.RS 4 -.nf -cargo fix \-\-edition\-idioms -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-check\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-generate-lockfile.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-generate-lockfile.1 deleted file mode 100644 index c4177d5c9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-generate-lockfile.1 +++ /dev/null @@ -1,138 +0,0 @@ -'\" t -.TH "CARGO\-GENERATE\-LOCKFILE" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-generate\-lockfile \- Generate the lockfile for a package -.SH "SYNOPSIS" -\fBcargo generate\-lockfile\fR [\fIoptions\fR] -.SH "DESCRIPTION" -This command will create the \fBCargo.lock\fR lockfile for the current package or -workspace. If the lockfile already exists, it will be rebuilt with the latest -available version of every package. -.sp -See also \fBcargo\-update\fR(1) which is also capable of creating a \fBCargo.lock\fR -lockfile and has more options for controlling update behavior. -.SH "OPTIONS" -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. 
-May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
-.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Create or update the lockfile for the current package or workspace: -.sp -.RS 4 -.nf -cargo generate\-lockfile -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-update\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-help.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-help.1 deleted file mode 100644 index 8ff0ad22e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-help.1 +++ /dev/null @@ -1,34 +0,0 @@ -'\" t -.TH "CARGO\-HELP" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-help \- Get help for a Cargo command -.SH "SYNOPSIS" -\fBcargo help\fR [\fIsubcommand\fR] -.SH "DESCRIPTION" -Prints a help message for the given command. -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Get help for a command: -.sp -.RS 4 -.nf -cargo help build -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Help is also available with the \fB\-\-help\fR flag: -.sp -.RS 4 -.nf -cargo build \-\-help -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-init.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-init.1 deleted file mode 100644 index 79502736b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-init.1 +++ /dev/null @@ -1,151 +0,0 @@ -'\" t -.TH "CARGO\-INIT" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-init \- Create a new Cargo package in an existing directory -.SH "SYNOPSIS" -\fBcargo init\fR [\fIoptions\fR] [\fIpath\fR] -.SH "DESCRIPTION" -This command will create a new Cargo manifest in the current directory. Give a -path as an argument to create in the given directory. -.sp -If there are typically\-named Rust source files already in the directory, those -will be used. If not, then a sample \fBsrc/main.rs\fR file will be created, or -\fBsrc/lib.rs\fR if \fB\-\-lib\fR is passed. -.sp -If the directory is not already in a VCS repository, then a new repository -is created (see \fB\-\-vcs\fR below). -.sp -See \fBcargo\-new\fR(1) for a similar command which will create a new package in -a new directory. -.SH "OPTIONS" -.SS "Init Options" -.sp -\fB\-\-bin\fR -.RS 4 -Create a package with a binary target (\fBsrc/main.rs\fR). -This is the default behavior. -.RE -.sp -\fB\-\-lib\fR -.RS 4 -Create a package with a library target (\fBsrc/lib.rs\fR). -.RE -.sp -\fB\-\-edition\fR \fIedition\fR -.RS 4 -Specify the Rust edition to use. Default is 2021. -Possible values: 2015, 2018, 2021 -.RE -.sp -\fB\-\-name\fR \fIname\fR -.RS 4 -Set the package name. Defaults to the directory name. -.RE -.sp -\fB\-\-vcs\fR \fIvcs\fR -.RS 4 -Initialize a new VCS repository for the given version control system (git, -hg, pijul, or fossil) or do not initialize any version control at all -(none). If not specified, defaults to \fBgit\fR or the configuration value -\fBcargo\-new.vcs\fR, or \fBnone\fR if already inside a VCS repository. -.RE -.sp -\fB\-\-registry\fR \fIregistry\fR -.RS 4 -This sets the \fBpublish\fR field in \fBCargo.toml\fR to the given registry name -which will restrict publishing only to that registry. -.sp -Registry names are defined in \fICargo config files\fR \&. -If not specified, the default registry defined by the \fBregistry.default\fR -config key is used. If the default registry is not set and \fB\-\-registry\fR is not -used, the \fBpublish\fR field will not be set which means that publishing will not -be restricted. 
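.sp
For example (a sketch; the registry name shown is illustrative and would need to be defined in the Cargo configuration files mentioned above):
.sp
.RS 4
.nf
cargo init \-\-registry my\-registry
.fi
.RE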
-.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Create a binary Cargo package in the current directory: -.sp -.RS 4 -.nf -cargo init -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-new\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-install.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-install.1 deleted file mode 100644 index 99c1fad53..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-install.1 +++ /dev/null @@ -1,452 +0,0 @@ -'\" t -.TH "CARGO\-INSTALL" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-install \- Build and install a Rust binary -.SH "SYNOPSIS" -\fBcargo install\fR [\fIoptions\fR] \fIcrate\fR\&... -.br -\fBcargo install\fR [\fIoptions\fR] \fB\-\-path\fR \fIpath\fR -.br -\fBcargo install\fR [\fIoptions\fR] \fB\-\-git\fR \fIurl\fR [\fIcrate\fR\&...] -.br -\fBcargo install\fR [\fIoptions\fR] \fB\-\-list\fR -.SH "DESCRIPTION" -This command manages Cargo's local set of installed binary crates. Only -packages which have executable \fB[[bin]]\fR or \fB[[example]]\fR targets can be -installed, and all executables are installed into the installation root's -\fBbin\fR folder. -.sp -The installation root is determined, in order of precedence: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB\-\-root\fR option -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBCARGO_INSTALL_ROOT\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBinstall.root\fR Cargo \fIconfig value\fR -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBCARGO_HOME\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB$HOME/.cargo\fR -.RE -.sp -There are multiple sources from which a crate can be installed. The default -location is crates.io but the \fB\-\-git\fR, \fB\-\-path\fR, and \fB\-\-registry\fR flags can -change this source. 
If the source contains more than one package (such as -crates.io or a git repository with multiple crates) the \fIcrate\fR argument is -required to indicate which crate should be installed. -.sp -Crates from crates.io can optionally specify the version they wish to install -via the \fB\-\-version\fR flags, and similarly packages from git repositories can -optionally specify the branch, tag, or revision that should be installed. If a -crate has multiple binaries, the \fB\-\-bin\fR argument can selectively install only -one of them, and if you'd rather install examples the \fB\-\-example\fR argument can -be used as well. -.sp -If the package is already installed, Cargo will reinstall it if the installed -version does not appear to be up\-to\-date. If any of the following values -change, then Cargo will reinstall the package: -.sp -.RS 4 -\h'-04'\(bu\h'+02'The package version and source. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'The set of binary names installed. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'The chosen features. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'The profile (\fB\-\-profile\fR). -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'The target (\fB\-\-target\fR). -.RE -.sp -Installing with \fB\-\-path\fR will always build and install, unless there are -conflicting binaries from another package. The \fB\-\-force\fR flag may be used to -force Cargo to always reinstall the package. -.sp -If the source is crates.io or \fB\-\-git\fR then by default the crate will be built -in a temporary target directory. To avoid this, the target directory can be -specified by setting the \fBCARGO_TARGET_DIR\fR environment variable to a relative -path. In particular, this can be useful for caching build artifacts on -continuous integration systems. -.sp -By default, the \fBCargo.lock\fR file that is included with the package will be -ignored. This means that Cargo will recompute which versions of dependencies -to use, possibly using newer versions that have been released since the -package was published. The \fB\-\-locked\fR flag can be used to force Cargo to use -the packaged \fBCargo.lock\fR file if it is available. This may be useful for -ensuring reproducible builds, to use the exact same set of dependencies that -were available when the package was published. It may also be useful if a -newer version of a dependency is published that no longer builds on your -system, or has other problems. The downside to using \fB\-\-locked\fR is that you -will not receive any fixes or updates to any dependency. Note that Cargo did -not start publishing \fBCargo.lock\fR files until version 1.37, which means -packages published with prior versions will not have a \fBCargo.lock\fR file -available. -.SH "OPTIONS" -.SS "Install Options" -.sp -\fB\-\-vers\fR \fIversion\fR, -\fB\-\-version\fR \fIversion\fR -.RS 4 -Specify a version to install. This may be a \fIversion -requirement\fR , like \fB~1.2\fR, to have Cargo -select the newest version from the given requirement. If the version does not -have a requirement operator (such as \fB^\fR or \fB~\fR), then it must be in the form -\fIMAJOR.MINOR.PATCH\fR, and will install exactly that version; it is \fInot\fR -treated as a caret requirement like Cargo dependencies are. -.RE -.sp -\fB\-\-git\fR \fIurl\fR -.RS 4 -Git URL to install the specified crate from. -.RE -.sp -\fB\-\-branch\fR \fIbranch\fR -.RS 4 -Branch to use when installing from git. -.RE -.sp -\fB\-\-tag\fR \fItag\fR -.RS 4 -Tag to use when installing from git. 
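.sp
For example, a sketch of installing from a git repository at a specific tag (the URL and tag are illustrative; the trailing \fBripgrep\fR argument selects the crate, since the repository contains more than one package):
.sp
.RS 4
.nf
cargo install \-\-git https://github.com/BurntSushi/ripgrep \-\-tag 13.0.0 ripgrep
.fi
.RE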
-.RE -.sp -\fB\-\-rev\fR \fIsha\fR -.RS 4 -Specific commit to use when installing from git. -.RE -.sp -\fB\-\-path\fR \fIpath\fR -.RS 4 -Filesystem path to local crate to install. -.RE -.sp -\fB\-\-list\fR -.RS 4 -List all installed packages and their versions. -.RE -.sp -\fB\-f\fR, -\fB\-\-force\fR -.RS 4 -Force overwriting existing crates or binaries. This can be used if a package -has installed a binary with the same name as another package. This is also -useful if something has changed on the system that you want to rebuild with, -such as a newer version of \fBrustc\fR\&. -.RE -.sp -\fB\-\-no\-track\fR -.RS 4 -By default, Cargo keeps track of the installed packages with a metadata file -stored in the installation root directory. This flag tells Cargo not to use or -create that file. With this flag, Cargo will refuse to overwrite any existing -files unless the \fB\-\-force\fR flag is used. This also disables Cargo's ability to -protect against multiple concurrent invocations of Cargo installing at the -same time. -.RE -.sp -\fB\-\-bin\fR \fIname\fR\&... -.RS 4 -Install only the specified binary. -.RE -.sp -\fB\-\-bins\fR -.RS 4 -Install all binaries. -.RE -.sp -\fB\-\-example\fR \fIname\fR\&... -.RS 4 -Install only the specified example. -.RE -.sp -\fB\-\-examples\fR -.RS 4 -Install all examples. -.RE -.sp -\fB\-\-root\fR \fIdir\fR -.RS 4 -Directory to install packages into. -.RE -.sp -\fB\-\-registry\fR \fIregistry\fR -.RS 4 -Name of the registry to use. Registry names are defined in \fICargo config -files\fR \&. If not specified, the default registry is used, -which is defined by the \fBregistry.default\fR config key which defaults to -\fBcrates\-io\fR\&. -.RE -.sp -\fB\-\-index\fR \fIindex\fR -.RS 4 -The URL of the registry index to use. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Install for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to a new temporary folder located in the -temporary directory of the platform. 
-.sp -When using \fB\-\-path\fR, by default it will use \fBtarget\fR directory in the workspace -of the local crate unless \fB\-\-target\-dir\fR -is specified. -.RE -.sp -\fB\-\-debug\fR -.RS 4 -Build with the \fBdev\fR profile instead the \fBrelease\fR profile. -See also the \fB\-\-profile\fR option for choosing a specific profile by name. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Install with the given profile. -See the \fIthe reference\fR for more details on profiles. -.RE -.SS "Manifest Options" -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Miscellaneous Options" -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The output format for diagnostic messages. Can be specified multiple times -and consists of comma\-separated values. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with -\fBshort\fR and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR -and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See -\fIthe reference\fR -for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. 
-.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains -the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Install or upgrade a package from crates.io: -.sp -.RS 4 -.nf -cargo install ripgrep -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Install or reinstall the package in the current directory: -.sp -.RS 4 -.nf -cargo install \-\-path . -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'View the list of installed packages: -.sp -.RS 4 -.nf -cargo install \-\-list -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-uninstall\fR(1), \fBcargo\-search\fR(1), \fBcargo\-publish\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-locate-project.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-locate-project.1 deleted file mode 100644 index 0fd5be1e6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-locate-project.1 +++ /dev/null @@ -1,124 +0,0 @@ -'\" t -.TH "CARGO\-LOCATE\-PROJECT" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-locate\-project \- Print a JSON representation of a Cargo.toml file's location -.SH "SYNOPSIS" -\fBcargo locate\-project\fR [\fIoptions\fR] -.SH "DESCRIPTION" -This command will print a JSON object to stdout with the full path to the -\fBCargo.toml\fR manifest. -.SH "OPTIONS" -.sp -\fB\-\-workspace\fR -.RS 4 -Locate the \fBCargo.toml\fR at the root of the workspace, as opposed to the current -workspace member. -.RE -.SS "Display Options" -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The representation in which to print the project location. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR (default): JSON object with the path under the key "root". -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBplain\fR: Just the path. -.RE -.RE -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. 
-May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Display the path to the manifest based on the current directory: -.sp -.RS 4 -.nf -cargo locate\-project -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-metadata\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-login.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-login.1 deleted file mode 100644 index d0cadb46e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-login.1 +++ /dev/null @@ -1,115 +0,0 @@ -'\" t -.TH "CARGO\-LOGIN" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-login \- Save an API token from the registry locally -.SH "SYNOPSIS" -\fBcargo login\fR [\fIoptions\fR] [\fItoken\fR] -.SH "DESCRIPTION" -This command will save the API token to disk so that commands that require -authentication, such as \fBcargo\-publish\fR(1), will be automatically -authenticated. The token is saved in \fB$CARGO_HOME/credentials.toml\fR\&. \fBCARGO_HOME\fR -defaults to \fB\&.cargo\fR in your home directory. -.sp -If the \fItoken\fR argument is not specified, it will be read from stdin. -.sp -The API token for crates.io may be retrieved from \&. -.sp -Take care to keep the token secret, it should not be shared with anyone else. -.SH "OPTIONS" -.SS "Login Options" -.sp -\fB\-\-registry\fR \fIregistry\fR -.RS 4 -Name of the registry to use. Registry names are defined in \fICargo config -files\fR \&. If not specified, the default registry is used, -which is defined by the \fBregistry.default\fR config key which defaults to -\fBcrates\-io\fR\&. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. 
-May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Save the API token to disk: -.sp -.RS 4 -.nf -cargo login -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-publish\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-metadata.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-metadata.1 deleted file mode 100644 index 89a05a79c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-metadata.1 +++ /dev/null @@ -1,461 +0,0 @@ -'\" t -.TH "CARGO\-METADATA" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-metadata \- Machine\-readable metadata about the current package -.SH "SYNOPSIS" -\fBcargo metadata\fR [\fIoptions\fR] -.SH "DESCRIPTION" -Output JSON to stdout containing information about the workspace members and -resolved dependencies of the current package. -.sp -It is recommended to include the \fB\-\-format\-version\fR flag to future\-proof -your code to ensure the output is in the format you are expecting. -.sp -See the \fIcargo_metadata crate\fR -for a Rust API for reading the metadata. -.SH "OUTPUT FORMAT" -The output has the following format: -.sp -.RS 4 -.nf -{ - /* Array of all packages in the workspace. - It also includes all feature\-enabled dependencies unless \-\-no\-deps is used. - */ - "packages": [ - { - /* The name of the package. */ - "name": "my\-package", - /* The version of the package. */ - "version": "0.1.0", - /* The Package ID, a unique identifier for referring to the package. */ - "id": "my\-package 0.1.0 (path+file:///path/to/my\-package)", - /* The license value from the manifest, or null. */ - "license": "MIT/Apache\-2.0", - /* The license\-file value from the manifest, or null. */ - "license_file": "LICENSE", - /* The description value from the manifest, or null. */ - "description": "Package description.", - /* The source ID of the package. This represents where - a package is retrieved from. - This is null for path dependencies and workspace members. 
- For other dependencies, it is a string with the format: - \- "registry+URL" for registry\-based dependencies. - Example: "registry+https://github.com/rust\-lang/crates.io\-index" - \- "git+URL" for git\-based dependencies. - Example: "git+https://github.com/rust\-lang/cargo?rev=5e85ba14aaa20f8133863373404cb0af69eeef2c#5e85ba14aaa20f8133863373404cb0af69eeef2c" - */ - "source": null, - /* Array of dependencies declared in the package's manifest. */ - "dependencies": [ - { - /* The name of the dependency. */ - "name": "bitflags", - /* The source ID of the dependency. May be null, see - description for the package source. - */ - "source": "registry+https://github.com/rust\-lang/crates.io\-index", - /* The version requirement for the dependency. - Dependencies without a version requirement have a value of "*". - */ - "req": "^1.0", - /* The dependency kind. - "dev", "build", or null for a normal dependency. - */ - "kind": null, - /* If the dependency is renamed, this is the new name for - the dependency as a string. null if it is not renamed. - */ - "rename": null, - /* Boolean of whether or not this is an optional dependency. */ - "optional": false, - /* Boolean of whether or not default features are enabled. */ - "uses_default_features": true, - /* Array of features enabled. */ - "features": [], - /* The target platform for the dependency. - null if not a target dependency. - */ - "target": "cfg(windows)", - /* The file system path for a local path dependency. - not present if not a path dependency. - */ - "path": "/path/to/dep", - /* A string of the URL of the registry this dependency is from. - If not specified or null, the dependency is from the default - registry (crates.io). - */ - "registry": null - } - ], - /* Array of Cargo targets. */ - "targets": [ - { - /* Array of target kinds. - \- lib targets list the `crate\-type` values from the - manifest such as "lib", "rlib", "dylib", - "proc\-macro", etc. (default ["lib"]) - \- binary is ["bin"] - \- example is ["example"] - \- integration test is ["test"] - \- benchmark is ["bench"] - \- build script is ["custom\-build"] - */ - "kind": [ - "bin" - ], - /* Array of crate types. - \- lib and example libraries list the `crate\-type` values - from the manifest such as "lib", "rlib", "dylib", - "proc\-macro", etc. (default ["lib"]) - \- all other target kinds are ["bin"] - */ - "crate_types": [ - "bin" - ], - /* The name of the target. */ - "name": "my\-package", - /* Absolute path to the root source file of the target. */ - "src_path": "/path/to/my\-package/src/main.rs", - /* The Rust edition of the target. - Defaults to the package edition. - */ - "edition": "2018", - /* Array of required features. - This property is not included if no required features are set. - */ - "required\-features": ["feat1"], - /* Whether the target should be documented by `cargo doc`. */ - "doc": true, - /* Whether or not this target has doc tests enabled, and - the target is compatible with doc testing. - */ - "doctest": false, - /* Whether or not this target should be built and run with `\-\-test` - */ - "test": true - } - ], - /* Set of features defined for the package. - Each feature maps to an array of features or dependencies it - enables. - */ - "features": { - "default": [ - "feat1" - ], - "feat1": [], - "feat2": [] - }, - /* Absolute path to this package's manifest. */ - "manifest_path": "/path/to/my\-package/Cargo.toml", - /* Package metadata. - This is null if no metadata is specified. 
- */ - "metadata": { - "docs": { - "rs": { - "all\-features": true - } - } - }, - /* List of registries to which this package may be published. - Publishing is unrestricted if null, and forbidden if an empty array. */ - "publish": [ - "crates\-io" - ], - /* Array of authors from the manifest. - Empty array if no authors specified. - */ - "authors": [ - "Jane Doe " - ], - /* Array of categories from the manifest. */ - "categories": [ - "command\-line\-utilities" - ], - /* Optional string that is the default binary picked by cargo run. */ - "default_run": null, - /* Optional string that is the minimum supported rust version */ - "rust_version": "1.56", - /* Array of keywords from the manifest. */ - "keywords": [ - "cli" - ], - /* The readme value from the manifest or null if not specified. */ - "readme": "README.md", - /* The repository value from the manifest or null if not specified. */ - "repository": "https://github.com/rust\-lang/cargo", - /* The homepage value from the manifest or null if not specified. */ - "homepage": "https://rust\-lang.org", - /* The documentation value from the manifest or null if not specified. */ - "documentation": "https://doc.rust\-lang.org/stable/std", - /* The default edition of the package. - Note that individual targets may have different editions. - */ - "edition": "2018", - /* Optional string that is the name of a native library the package - is linking to. - */ - "links": null, - } - ], - /* Array of members of the workspace. - Each entry is the Package ID for the package. - */ - "workspace_members": [ - "my\-package 0.1.0 (path+file:///path/to/my\-package)", - ], - // The resolved dependency graph for the entire workspace. The enabled - // features are based on the enabled features for the "current" package. - // Inactivated optional dependencies are not listed. - // - // This is null if \-\-no\-deps is specified. - // - // By default, this includes all dependencies for all target platforms. - // The `\-\-filter\-platform` flag may be used to narrow to a specific - // target triple. - "resolve": { - /* Array of nodes within the dependency graph. - Each node is a package. - */ - "nodes": [ - { - /* The Package ID of this node. */ - "id": "my\-package 0.1.0 (path+file:///path/to/my\-package)", - /* The dependencies of this package, an array of Package IDs. */ - "dependencies": [ - "bitflags 1.0.4 (registry+https://github.com/rust\-lang/crates.io\-index)" - ], - /* The dependencies of this package. This is an alternative to - "dependencies" which contains additional information. In - particular, this handles renamed dependencies. - */ - "deps": [ - { - /* The name of the dependency's library target. - If this is a renamed dependency, this is the new - name. - */ - "name": "bitflags", - /* The Package ID of the dependency. */ - "pkg": "bitflags 1.0.4 (registry+https://github.com/rust\-lang/crates.io\-index)", - /* Array of dependency kinds. Added in Cargo 1.40. */ - "dep_kinds": [ - { - /* The dependency kind. - "dev", "build", or null for a normal dependency. - */ - "kind": null, - /* The target platform for the dependency. - null if not a target dependency. - */ - "target": "cfg(windows)" - } - ] - } - ], - /* Array of features enabled on this package. */ - "features": [ - "default" - ] - } - ], - /* The root package of the workspace. - This is null if this is a virtual workspace. Otherwise it is - the Package ID of the root package. 
- */ - "root": "my\-package 0.1.0 (path+file:///path/to/my\-package)" - }, - /* The absolute path to the build directory where Cargo places its output. */ - "target_directory": "/path/to/my\-package/target", - /* The version of the schema for this metadata structure. - This will be changed if incompatible changes are ever made. - */ - "version": 1, - /* The absolute path to the root of the workspace. */ - "workspace_root": "/path/to/my\-package" - /* Workspace metadata. - This is null if no metadata is specified. */ - "metadata": { - "docs": { - "rs": { - "all\-features": true - } - } - } -} -.fi -.RE -.SH "OPTIONS" -.SS "Output Options" -.sp -\fB\-\-no\-deps\fR -.RS 4 -Output information only about the workspace members and don't fetch -dependencies. -.RE -.sp -\fB\-\-format\-version\fR \fIversion\fR -.RS 4 -Specify the version of the output format to use. Currently \fB1\fR is the only -possible value. -.RE -.sp -\fB\-\-filter\-platform\fR \fItriple\fR -.RS 4 -This filters the \fBresolve\fR output to only include dependencies for the -given target triple. Without this flag, the resolve includes all targets. -.sp -Note that the dependencies listed in the "packages" array still includes all -dependencies. Each package definition is intended to be an unaltered -reproduction of the information within \fBCargo.toml\fR\&. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. 
The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Output JSON about the current package: -.sp -.RS 4 -.nf -cargo metadata \-\-format\-version=1 -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-new.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-new.1 deleted file mode 100644 index 475f929c9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-new.1 +++ /dev/null @@ -1,146 +0,0 @@ -'\" t -.TH "CARGO\-NEW" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-new \- Create a new Cargo package -.SH "SYNOPSIS" -\fBcargo new\fR [\fIoptions\fR] \fIpath\fR -.SH "DESCRIPTION" -This command will create a new Cargo package in the given directory. This -includes a simple template with a \fBCargo.toml\fR manifest, sample source file, -and a VCS ignore file. If the directory is not already in a VCS repository, -then a new repository is created (see \fB\-\-vcs\fR below). -.sp -See \fBcargo\-init\fR(1) for a similar command which will create a new manifest -in an existing directory. -.SH "OPTIONS" -.SS "New Options" -.sp -\fB\-\-bin\fR -.RS 4 -Create a package with a binary target (\fBsrc/main.rs\fR). -This is the default behavior. -.RE -.sp -\fB\-\-lib\fR -.RS 4 -Create a package with a library target (\fBsrc/lib.rs\fR). -.RE -.sp -\fB\-\-edition\fR \fIedition\fR -.RS 4 -Specify the Rust edition to use. Default is 2021. -Possible values: 2015, 2018, 2021 -.RE -.sp -\fB\-\-name\fR \fIname\fR -.RS 4 -Set the package name. Defaults to the directory name. -.RE -.sp -\fB\-\-vcs\fR \fIvcs\fR -.RS 4 -Initialize a new VCS repository for the given version control system (git, -hg, pijul, or fossil) or do not initialize any version control at all -(none). 
If not specified, defaults to \fBgit\fR or the configuration value -\fBcargo\-new.vcs\fR, or \fBnone\fR if already inside a VCS repository. -.RE -.sp -\fB\-\-registry\fR \fIregistry\fR -.RS 4 -This sets the \fBpublish\fR field in \fBCargo.toml\fR to the given registry name -which will restrict publishing only to that registry. -.sp -Registry names are defined in \fICargo config files\fR \&. -If not specified, the default registry defined by the \fBregistry.default\fR -config key is used. If the default registry is not set and \fB\-\-registry\fR is not -used, the \fBpublish\fR field will not be set which means that publishing will not -be restricted. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Create a binary Cargo package in the given directory: -.sp -.RS 4 -.nf -cargo new foo -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-init\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-owner.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-owner.1 deleted file mode 100644 index 5ddba809c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-owner.1 +++ /dev/null @@ -1,177 +0,0 @@ -'\" t -.TH "CARGO\-OWNER" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-owner \- Manage the owners of a crate on the registry -.SH "SYNOPSIS" -\fBcargo owner\fR [\fIoptions\fR] \fB\-\-add\fR \fIlogin\fR [\fIcrate\fR] -.br -\fBcargo owner\fR [\fIoptions\fR] \fB\-\-remove\fR \fIlogin\fR [\fIcrate\fR] -.br -\fBcargo owner\fR [\fIoptions\fR] \fB\-\-list\fR [\fIcrate\fR] -.SH "DESCRIPTION" -This command will modify the owners for a crate on the registry. Owners of a -crate can upload new versions and yank old versions. Non\-team owners can also -modify the set of owners, so take care! 
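For instance, a typical owner-management session might look like the following; the crate name foo, the user name, and the GitHub org/team are placeholders, and the github:org:team form is the spelling crates.io generally accepts for teams:

    cargo owner --list foo
    cargo owner --add alice foo
    cargo owner --add github:my-org:maintainers foo
    cargo owner --remove alice foo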
-.sp -This command requires you to be authenticated with either the \fB\-\-token\fR option -or using \fBcargo\-login\fR(1). -.sp -If the crate name is not specified, it will use the package name from the -current directory. -.sp -See \fIthe reference\fR for more -information about owners and publishing. -.SH "OPTIONS" -.SS "Owner Options" -.sp -\fB\-a\fR, -\fB\-\-add\fR \fIlogin\fR\&... -.RS 4 -Invite the given user or team as an owner. -.RE -.sp -\fB\-r\fR, -\fB\-\-remove\fR \fIlogin\fR\&... -.RS 4 -Remove the given user or team as an owner. -.RE -.sp -\fB\-l\fR, -\fB\-\-list\fR -.RS 4 -List owners of a crate. -.RE -.sp -\fB\-\-token\fR \fItoken\fR -.RS 4 -API token to use when authenticating. This overrides the token stored in -the credentials file (which is created by \fBcargo\-login\fR(1)). -.sp -\fICargo config\fR environment variables can be -used to override the tokens stored in the credentials file. The token for -crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fR environment -variable. Tokens for other registries may be specified with environment -variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fR where \fBNAME\fR is the name -of the registry in all capital letters. -.RE -.sp -\fB\-\-index\fR \fIindex\fR -.RS 4 -The URL of the registry index to use. -.RE -.sp -\fB\-\-registry\fR \fIregistry\fR -.RS 4 -Name of the registry to use. Registry names are defined in \fICargo config -files\fR \&. If not specified, the default registry is used, -which is defined by the \fBregistry.default\fR config key which defaults to -\fBcrates\-io\fR\&. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
-.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'List owners of a package: -.sp -.RS 4 -.nf -cargo owner \-\-list foo -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Invite an owner to a package: -.sp -.RS 4 -.nf -cargo owner \-\-add username foo -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Remove an owner from a package: -.sp -.RS 4 -.nf -cargo owner \-\-remove username foo -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-login\fR(1), \fBcargo\-publish\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-package.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-package.1 deleted file mode 100644 index 3258628f5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-package.1 +++ /dev/null @@ -1,322 +0,0 @@ -'\" t -.TH "CARGO\-PACKAGE" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-package \- Assemble the local package into a distributable tarball -.SH "SYNOPSIS" -\fBcargo package\fR [\fIoptions\fR] -.SH "DESCRIPTION" -This command will create a distributable, compressed \fB\&.crate\fR file with the -source code of the package in the current directory. The resulting file will -be stored in the \fBtarget/package\fR directory. This performs the following -steps: -.sp -.RS 4 -\h'-04' 1.\h'+01'Load and check the current workspace, performing some basic checks. -.sp -.RS 4 -\h'-04'\(bu\h'+02'Path dependencies are not allowed unless they have a version key. Cargo -will ignore the path key for dependencies in published packages. -\fBdev\-dependencies\fR do not have this restriction. -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Create the compressed \fB\&.crate\fR file. -.sp -.RS 4 -\h'-04'\(bu\h'+02'The original \fBCargo.toml\fR file is rewritten and normalized. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB[patch]\fR, \fB[replace]\fR, and \fB[workspace]\fR sections are removed from the -manifest. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBCargo.lock\fR is automatically included if the package contains an -executable binary or example target. \fBcargo\-install\fR(1) will use the -packaged lock file if the \fB\-\-locked\fR flag is used. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'A \fB\&.cargo_vcs_info.json\fR file is included that contains information -about the current VCS checkout hash if available (not included with -\fB\-\-allow\-dirty\fR). -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Extract the \fB\&.crate\fR file and build it to verify it can build. -.sp -.RS 4 -\h'-04'\(bu\h'+02'This will rebuild your package from scratch to ensure that it can be -built from a pristine state. The \fB\-\-no\-verify\fR flag can be used to skip -this step. -.RE -.RE -.sp -.RS 4 -\h'-04' 4.\h'+01'Check that build scripts did not modify any source files. -.RE -.sp -The list of files included can be controlled with the \fBinclude\fR and \fBexclude\fR -fields in the manifest. -.sp -See \fIthe reference\fR for more details about -packaging and publishing. -.SS ".cargo_vcs_info.json format" -Will generate a \fB\&.cargo_vcs_info.json\fR in the following format -.sp -.RS 4 -.nf -{ - "git": { - "sha1": "aac20b6e7e543e6dd4118b246c77225e3a3a1302" - }, - "path_in_vcs": "" -} -.fi -.RE -.sp -\fBpath_in_vcs\fR will be set to a repo\-relative path for packages -in subdirectories of the version control repository. -.SH "OPTIONS" -.SS "Package Options" -.sp -\fB\-l\fR, -\fB\-\-list\fR -.RS 4 -Print files included in a package without making one. -.RE -.sp -\fB\-\-no\-verify\fR -.RS 4 -Don't verify the contents by building them. 
-.RE -.sp -\fB\-\-no\-metadata\fR -.RS 4 -Ignore warnings about a lack of human\-usable metadata (such as the description -or the license). -.RE -.sp -\fB\-\-allow\-dirty\fR -.RS 4 -Allow working directories with uncommitted VCS changes to be packaged. -.RE -.SS "Package Selection" -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. -.sp -The default members of a workspace can be set explicitly with the -\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Package only the specified packages. See \fBcargo\-pkgid\fR(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern. -.RE -.sp -\fB\-\-workspace\fR -.RS 4 -Package all members in the workspace. -.RE -.sp -\fB\-\-exclude\fR \fISPEC\fR\&... -.RS 4 -Exclude the specified packages. Must be used in conjunction with the -\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports -common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Package for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. 
-.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Miscellaneous Options" -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
-.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Create a compressed \fB\&.crate\fR file of the current package: -.sp -.RS 4 -.nf -cargo package -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-publish\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-pkgid.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-pkgid.1 deleted file mode 100644 index 67ed66f0b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-pkgid.1 +++ /dev/null @@ -1,223 +0,0 @@ -'\" t -.TH "CARGO\-PKGID" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-pkgid \- Print a fully qualified package specification -.SH "SYNOPSIS" -\fBcargo pkgid\fR [\fIoptions\fR] [\fIspec\fR] -.SH "DESCRIPTION" -Given a \fIspec\fR argument, print out the fully qualified package ID specifier -for a package or dependency in the current workspace. This command will -generate an error if \fIspec\fR is ambiguous as to which package it refers to in -the dependency graph. If no \fIspec\fR is given, then the specifier for the local -package is printed. -.sp -This command requires that a lockfile is available and dependencies have been -fetched. -.sp -A package specifier consists of a name, version, and source URL. You are -allowed to use partial specifiers to succinctly match a specific package as -long as it matches only one package. The format of a \fIspec\fR can be one of the -following: - -.TS -allbox tab(:); -lt lt. -T{ -SPEC Structure -T}:T{ -Example SPEC -T} -T{ -\fIname\fR -T}:T{ -\fBbitflags\fR -T} -T{ -\fIname\fR\fB:\fR\fIversion\fR -T}:T{ -\fBbitflags:1.0.4\fR -T} -T{ -\fIurl\fR -T}:T{ -\fBhttps://github.com/rust\-lang/cargo\fR -T} -T{ -\fIurl\fR\fB#\fR\fIversion\fR -T}:T{ -\fBhttps://github.com/rust\-lang/cargo#0.33.0\fR -T} -T{ -\fIurl\fR\fB#\fR\fIname\fR -T}:T{ -\fBhttps://github.com/rust\-lang/crates.io\-index#bitflags\fR -T} -T{ -\fIurl\fR\fB#\fR\fIname\fR\fB:\fR\fIversion\fR -T}:T{ -\fBhttps://github.com/rust\-lang/cargo#crates\-io:0.21.0\fR -T} -.TE -.sp -.SH "OPTIONS" -.SS "Package Selection" -.sp -\fB\-p\fR \fIspec\fR, -\fB\-\-package\fR \fIspec\fR -.RS 4 -Get the package ID for the given package instead of the current package. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. 
The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Retrieve package specification for \fBfoo\fR package: -.sp -.RS 4 -.nf -cargo pkgid foo -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Retrieve package specification for version 1.0.0 of \fBfoo\fR: -.sp -.RS 4 -.nf -cargo pkgid foo:1.0.0 -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Retrieve package specification for \fBfoo\fR from crates.io: -.sp -.RS 4 -.nf -cargo pkgid https://github.com/rust\-lang/crates.io\-index#foo -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 4.\h'+01'Retrieve package specification for \fBfoo\fR from a local package: -.sp -.RS 4 -.nf -cargo pkgid file:///path/to/local/package#foo -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-generate\-lockfile\fR(1), \fBcargo\-metadata\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-publish.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-publish.1 deleted file mode 100644 index afd4e9139..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-publish.1 +++ /dev/null @@ -1,272 +0,0 @@ -'\" t -.TH "CARGO\-PUBLISH" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-publish \- Upload a package to the registry -.SH "SYNOPSIS" -\fBcargo publish\fR [\fIoptions\fR] -.SH "DESCRIPTION" -This command will create a distributable, compressed \fB\&.crate\fR file with the -source code of the package in the current directory and upload it to a -registry. The default registry is \&. This performs the -following steps: -.sp -.RS 4 -\h'-04' 1.\h'+01'Performs a few checks, including: -.sp -.RS 4 -\h'-04'\(bu\h'+02'Checks the \fBpackage.publish\fR key in the manifest for restrictions on -which registries you are allowed to publish to. 
-.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Create a \fB\&.crate\fR file by following the steps in \fBcargo\-package\fR(1). -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Upload the crate to the registry. Note that the server will perform -additional checks on the crate. -.RE -.sp -This command requires you to be authenticated with either the \fB\-\-token\fR option -or using \fBcargo\-login\fR(1). -.sp -See \fIthe reference\fR for more details about -packaging and publishing. -.SH "OPTIONS" -.SS "Publish Options" -.sp -\fB\-\-dry\-run\fR -.RS 4 -Perform all checks without uploading. -.RE -.sp -\fB\-\-token\fR \fItoken\fR -.RS 4 -API token to use when authenticating. This overrides the token stored in -the credentials file (which is created by \fBcargo\-login\fR(1)). -.sp -\fICargo config\fR environment variables can be -used to override the tokens stored in the credentials file. The token for -crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fR environment -variable. Tokens for other registries may be specified with environment -variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fR where \fBNAME\fR is the name -of the registry in all capital letters. -.RE -.sp -\fB\-\-no\-verify\fR -.RS 4 -Don't verify the contents by building them. -.RE -.sp -\fB\-\-allow\-dirty\fR -.RS 4 -Allow working directories with uncommitted VCS changes to be packaged. -.RE -.sp -\fB\-\-index\fR \fIindex\fR -.RS 4 -The URL of the registry index to use. -.RE -.sp -\fB\-\-registry\fR \fIregistry\fR -.RS 4 -Name of the registry to publish to. Registry names are defined in \fICargo -config files\fR \&. If not specified, and there is a -\fI\f(BIpackage.publish\fI\fR field in -\fBCargo.toml\fR with a single registry, then it will publish to that registry. -Otherwise it will use the default registry, which is defined by the -\fI\f(BIregistry.default\fI\fR config key -which defaults to \fBcrates\-io\fR\&. -.RE -.SS "Package Selection" -By default, the package in the current working directory is selected. The \fB\-p\fR -flag can be used to choose a different package in a workspace. -.sp -\fB\-p\fR \fIspec\fR, -\fB\-\-package\fR \fIspec\fR -.RS 4 -The package to publish. See \fBcargo\-pkgid\fR(1) for the SPEC -format. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Publish for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. 
This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Miscellaneous Options" -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. 
-.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Publish the current package: -.sp -.RS 4 -.nf -cargo publish -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-package\fR(1), \fBcargo\-login\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-report.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-report.1 deleted file mode 100644 index d72bedbad..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-report.1 +++ /dev/null @@ -1,48 +0,0 @@ -'\" t -.TH "CARGO\-REPORT" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-report \- Generate and display various kinds of reports -.SH "SYNOPSIS" -\fBcargo report\fR \fItype\fR [\fIoptions\fR] -.SS "DESCRIPTION" -Displays a report of the given \fItype\fR \- currently, only \fBfuture\-incompat\fR is supported -.SH "OPTIONS" -.sp -\fB\-\-id\fR \fIid\fR -.RS 4 -Show the report with the specified Cargo\-generated id -.RE -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Only display a report for the specified package -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Display the latest future\-incompat report: -.sp -.RS 4 -.nf -cargo report future\-incompat -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Display the latest future\-incompat report for a specific package: -.sp -.RS 4 -.nf -cargo report future\-incompat \-\-package my\-dep:0.0.1 -.fi -.RE -.RE -.SH "SEE ALSO" -\fIFuture incompat report\fR -.sp -\fBcargo\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-run.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-run.1 deleted file mode 100644 index 5092ece9b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-run.1 +++ /dev/null @@ -1,289 +0,0 @@ -'\" t -.TH "CARGO\-RUN" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-run \- Run the current package -.SH "SYNOPSIS" -\fBcargo run\fR [\fIoptions\fR] [\fB\-\-\fR \fIargs\fR] -.SH "DESCRIPTION" -Run a binary or example of the local package. -.sp -All the arguments following the two dashes (\fB\-\-\fR) are passed to the binary to -run. If you're passing arguments to both Cargo and the binary, the ones after -\fB\-\-\fR go to the binary, the ones before go to Cargo. -.SH "OPTIONS" -.SS "Package Selection" -By default, the package in the current working directory is selected. The \fB\-p\fR -flag can be used to choose a different package in a workspace. -.sp -\fB\-p\fR \fIspec\fR, -\fB\-\-package\fR \fIspec\fR -.RS 4 -The package to run. See \fBcargo\-pkgid\fR(1) for the SPEC -format. -.RE -.SS "Target Selection" -When no target selection options are given, \fBcargo run\fR will run the binary -target. If there are multiple binary targets, you must pass a target flag to -choose one. Or, the \fBdefault\-run\fR field may be specified in the \fB[package]\fR -section of \fBCargo.toml\fR to choose the name of the binary to run by default. -.sp -\fB\-\-bin\fR \fIname\fR -.RS 4 -Run the specified binary. -.RE -.sp -\fB\-\-example\fR \fIname\fR -.RS 4 -Run the specified example. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. 
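As a brief illustration of how the feature flags described below combine with arguments passed after --, the following hypothetical invocation builds a binary named server with only the tls and metrics features enabled and forwards --port 8080 to it:

    cargo run --bin server --no-default-features --features "tls,metrics" -- --port 8080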
-.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Run for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.sp -\fB\-r\fR, -\fB\-\-release\fR -.RS 4 -Run optimized artifacts with the \fBrelease\fR profile. -See also the \fB\-\-profile\fR option for choosing a specific profile by name. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Run with the given profile. -See the \fIthe reference\fR for more details on profiles. -.RE -.sp -\fB\-\-ignore\-rust\-version\fR -.RS 4 -Run the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's \fBrust\-version\fR field. -.RE -.SS "Output Options" -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The output format for diagnostic messages. Can be specified multiple times -and consists of comma\-separated values. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with -\fBshort\fR and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR -and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See -\fIthe reference\fR -for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. 
-.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains -the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SS "Miscellaneous Options" -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
-.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Build the local package and run its main target (assuming only one binary): -.sp -.RS 4 -.nf -cargo run -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Run an example with extra arguments: -.sp -.RS 4 -.nf -cargo run \-\-example exname \-\- \-\-exoption exarg1 exarg2 -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-build\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-rustc.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-rustc.1 deleted file mode 100644 index 7eecba83a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-rustc.1 +++ /dev/null @@ -1,393 +0,0 @@ -'\" t -.TH "CARGO\-RUSTC" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-rustc \- Compile the current package, and pass extra options to the compiler -.SH "SYNOPSIS" -\fBcargo rustc\fR [\fIoptions\fR] [\fB\-\-\fR \fIargs\fR] -.SH "DESCRIPTION" -The specified target for the current package (or package specified by \fB\-p\fR if -provided) will be compiled along with all of its dependencies. The specified -\fIargs\fR will all be passed to the final compiler invocation, not any of the -dependencies. Note that the compiler will still unconditionally receive -arguments such as \fB\-L\fR, \fB\-\-extern\fR, and \fB\-\-crate\-type\fR, and the specified -\fIargs\fR will simply be added to the compiler invocation. -.sp -See for documentation on rustc -flags. -.sp -This command requires that only one target is being compiled when additional -arguments are provided. If more than one target is available for the current -package the filters of \fB\-\-lib\fR, \fB\-\-bin\fR, etc, must be used to select which -target is compiled. -.sp -To pass flags to all compiler processes spawned by Cargo, use the \fBRUSTFLAGS\fR -\fIenvironment variable\fR or the -\fBbuild.rustflags\fR \fIconfig value\fR \&. -.SH "OPTIONS" -.SS "Package Selection" -By default, the package in the current working directory is selected. The \fB\-p\fR -flag can be used to choose a different package in a workspace. -.sp -\fB\-p\fR \fIspec\fR, -\fB\-\-package\fR \fIspec\fR -.RS 4 -The package to build. See \fBcargo\-pkgid\fR(1) for the SPEC -format. -.RE -.SS "Target Selection" -When no target selection options are given, \fBcargo rustc\fR will build all -binary and library targets of the selected package. -.sp -Passing target selection flags will build only the specified -targets. -.sp -Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also -support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. -.sp -\fB\-\-lib\fR -.RS 4 -Build the package's library. -.RE -.sp -\fB\-\-bin\fR \fIname\fR\&... -.RS 4 -Build the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-bins\fR -.RS 4 -Build all binary targets. -.RE -.sp -\fB\-\-example\fR \fIname\fR\&... -.RS 4 -Build the specified example. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-examples\fR -.RS 4 -Build all example targets. -.RE -.sp -\fB\-\-test\fR \fIname\fR\&... -.RS 4 -Build the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns. 
-.RE -.sp -\fB\-\-tests\fR -.RS 4 -Build all targets in test mode that have the \fBtest = true\fR manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the \fBtest\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-bench\fR \fIname\fR\&... -.RS 4 -Build the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-benches\fR -.RS 4 -Build all targets in benchmark mode that have the \fBbench = true\fR -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the \fBbench\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-all\-targets\fR -.RS 4 -Build all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Build for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.sp -\fB\-r\fR, -\fB\-\-release\fR -.RS 4 -Build optimized artifacts with the \fBrelease\fR profile. -See also the \fB\-\-profile\fR option for choosing a specific profile by name. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Build with the given profile. -.sp -The \fBrustc\fR subcommand will treat the following named profiles with special behaviors: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBcheck\fR \[em] Builds in the same way as the \fBcargo\-check\fR(1) command with -the \fBdev\fR profile. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBtest\fR \[em] Builds in the same way as the \fBcargo\-test\fR(1) command, -enabling building in test mode which will enable tests and enable the \fBtest\fR -cfg option. See \fIrustc -tests\fR for more detail. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBbench\fR \[em] Builds in the same was as the \fBcargo\-bench\fR(1) command, -similar to the \fBtest\fR profile. 
-.RE -.sp -See the \fIthe reference\fR for more details on profiles. -.RE -.sp -\fB\-\-ignore\-rust\-version\fR -.RS 4 -Build the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's \fBrust\-version\fR field. -.RE -.SS "Output Options" -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The output format for diagnostic messages. Can be specified multiple times -and consists of comma\-separated values. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with -\fBshort\fR and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR -and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See -\fIthe reference\fR -for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains -the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. 
-.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SS "Miscellaneous Options" -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.sp -\fB\-\-future\-incompat\-report\fR -.RS 4 -Displays a future\-incompat report for any future\-incompatible warnings -produced during execution of this command -.sp -See \fBcargo\-report\fR(1) -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Check if your package (not including dependencies) uses unsafe code: -.sp -.RS 4 -.nf -cargo rustc \-\-lib \-\- \-D unsafe\-code -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Try an experimental flag on the nightly compiler, such as this which prints -the size of every type: -.sp -.RS 4 -.nf -cargo rustc \-\-lib \-\- \-Z print\-type\-sizes -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-build\fR(1), \fBrustc\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-rustdoc.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-rustdoc.1 deleted file mode 100644 index dff3e7e3b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-rustdoc.1 +++ /dev/null @@ -1,365 +0,0 @@ -'\" t -.TH "CARGO\-RUSTDOC" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-rustdoc \- Build a package's documentation, using specified custom flags -.SH "SYNOPSIS" -\fBcargo rustdoc\fR [\fIoptions\fR] [\fB\-\-\fR \fIargs\fR] -.SH "DESCRIPTION" -The specified target for the current package (or package specified by \fB\-p\fR if -provided) will be documented with the specified \fIargs\fR being passed to the -final rustdoc invocation. Dependencies will not be documented as part of this -command. 
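(Illustrative aside, not taken from the original page: a typical pass-through invocation, using a flag documented by rustdoc itself rather than here, looks like

    cargo rustdoc --lib -- --document-private-items

where everything after -- is handed to rustdoc unchanged.)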
Note that rustdoc will still unconditionally receive arguments such -as \fB\-L\fR, \fB\-\-extern\fR, and \fB\-\-crate\-type\fR, and the specified \fIargs\fR will simply -be added to the rustdoc invocation. -.sp -See for documentation on rustdoc -flags. -.sp -This command requires that only one target is being compiled when additional -arguments are provided. If more than one target is available for the current -package the filters of \fB\-\-lib\fR, \fB\-\-bin\fR, etc, must be used to select which -target is compiled. -.sp -To pass flags to all rustdoc processes spawned by Cargo, use the -\fBRUSTDOCFLAGS\fR \fIenvironment variable\fR -or the \fBbuild.rustdocflags\fR \fIconfig value\fR \&. -.SH "OPTIONS" -.SS "Documentation Options" -.sp -\fB\-\-open\fR -.RS 4 -Open the docs in a browser after building them. This will use your default -browser unless you define another one in the \fBBROWSER\fR environment variable -or use the \fI\f(BIdoc.browser\fI\fR configuration -option. -.RE -.SS "Package Selection" -By default, the package in the current working directory is selected. The \fB\-p\fR -flag can be used to choose a different package in a workspace. -.sp -\fB\-p\fR \fIspec\fR, -\fB\-\-package\fR \fIspec\fR -.RS 4 -The package to document. See \fBcargo\-pkgid\fR(1) for the SPEC -format. -.RE -.SS "Target Selection" -When no target selection options are given, \fBcargo rustdoc\fR will document all -binary and library targets of the selected package. The binary will be skipped -if its name is the same as the lib target. Binaries are skipped if they have -\fBrequired\-features\fR that are missing. -.sp -Passing target selection flags will document only the specified -targets. -.sp -Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also -support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. -.sp -\fB\-\-lib\fR -.RS 4 -Document the package's library. -.RE -.sp -\fB\-\-bin\fR \fIname\fR\&... -.RS 4 -Document the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-bins\fR -.RS 4 -Document all binary targets. -.RE -.sp -\fB\-\-example\fR \fIname\fR\&... -.RS 4 -Document the specified example. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-examples\fR -.RS 4 -Document all example targets. -.RE -.sp -\fB\-\-test\fR \fIname\fR\&... -.RS 4 -Document the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-tests\fR -.RS 4 -Document all targets in test mode that have the \fBtest = true\fR manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the \fBtest\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-bench\fR \fIname\fR\&... -.RS 4 -Document the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-benches\fR -.RS 4 -Document all targets in benchmark mode that have the \fBbench = true\fR -manifest flag set. 
By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the \fBbench\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-all\-targets\fR -.RS 4 -Document all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Document for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.sp -\fB\-r\fR, -\fB\-\-release\fR -.RS 4 -Document optimized artifacts with the \fBrelease\fR profile. -See also the \fB\-\-profile\fR option for choosing a specific profile by name. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Document with the given profile. -See the \fIthe reference\fR for more details on profiles. -.RE -.sp -\fB\-\-ignore\-rust\-version\fR -.RS 4 -Document the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's \fBrust\-version\fR field. -.RE -.SS "Output Options" -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. 
-.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The output format for diagnostic messages. Can be specified multiple times -and consists of comma\-separated values. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with -\fBshort\fR and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR -and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See -\fIthe reference\fR -for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains -the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. 
-.RE -.SS "Miscellaneous Options" -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Build documentation with custom CSS included from a given file: -.sp -.RS 4 -.nf -cargo rustdoc \-\-lib \-\- \-\-extend\-css extra.css -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-doc\fR(1), \fBrustdoc\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-search.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-search.1 deleted file mode 100644 index 505fec118..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-search.1 +++ /dev/null @@ -1,118 +0,0 @@ -'\" t -.TH "CARGO\-SEARCH" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-search \- Search packages in crates.io -.SH "SYNOPSIS" -\fBcargo search\fR [\fIoptions\fR] [\fIquery\fR\&...] -.SH "DESCRIPTION" -This performs a textual search for crates on \&. The matching -crates will be displayed along with their description in TOML format suitable -for copying into a \fBCargo.toml\fR manifest. -.SH "OPTIONS" -.SS "Search Options" -.sp -\fB\-\-limit\fR \fIlimit\fR -.RS 4 -Limit the number of results (default: 10, max: 100). -.RE -.sp -\fB\-\-index\fR \fIindex\fR -.RS 4 -The URL of the registry index to use. -.RE -.sp -\fB\-\-registry\fR \fIregistry\fR -.RS 4 -Name of the registry to use. Registry names are defined in \fICargo config -files\fR \&. If not specified, the default registry is used, -which is defined by the \fBregistry.default\fR config key which defaults to -\fBcrates\-io\fR\&. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. 
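(Illustrative aside, not from the original page: a run such as

    cargo search --limit 3 serde

prints one line per match in the TOML shape described above, roughly `serde = "1.0.136"  # A generic serialization/deserialization framework`; the version number is whatever crates.io returns at the time, so treat it as a placeholder.)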
-.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Search for a package from crates.io: -.sp -.RS 4 -.nf -cargo search serde -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-install\fR(1), \fBcargo\-publish\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-test.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-test.1 deleted file mode 100644 index 4be33d5bf..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-test.1 +++ /dev/null @@ -1,511 +0,0 @@ -'\" t -.TH "CARGO\-TEST" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-test \- Execute unit and integration tests of a package -.SH "SYNOPSIS" -\fBcargo test\fR [\fIoptions\fR] [\fItestname\fR] [\fB\-\-\fR \fItest\-options\fR] -.SH "DESCRIPTION" -Compile and execute unit and integration tests. -.sp -The test filtering argument \fBTESTNAME\fR and all the arguments following the two -dashes (\fB\-\-\fR) are passed to the test binaries and thus to \fIlibtest\fR (rustc's -built in unit\-test and micro\-benchmarking framework). If you're passing -arguments to both Cargo and the binary, the ones after \fB\-\-\fR go to the binary, -the ones before go to Cargo. For details about libtest's arguments see the -output of \fBcargo test \-\- \-\-help\fR and check out the rustc book's chapter on -how tests work at \&. -.sp -As an example, this will filter for tests with \fBfoo\fR in their name and run them -on 3 threads in parallel: -.sp -.RS 4 -.nf -cargo test foo \-\- \-\-test\-threads 3 -.fi -.RE -.sp -Tests are built with the \fB\-\-test\fR option to \fBrustc\fR which creates an -executable with a \fBmain\fR function that automatically runs all functions -annotated with the \fB#[test]\fR attribute in multiple threads. \fB#[bench]\fR -annotated functions will also be run with one iteration to verify that they -are functional. -.sp -The libtest harness may be disabled by setting \fBharness = false\fR in the target -manifest settings, in which case your code will need to provide its own \fBmain\fR -function to handle running tests. -.sp -Documentation tests are also run by default, which is handled by \fBrustdoc\fR\&. It -extracts code samples from documentation comments and executes them. See the -\fIrustdoc book\fR for more information on -writing doc tests. -.SH "OPTIONS" -.SS "Test Options" -.sp -\fB\-\-no\-run\fR -.RS 4 -Compile, but don't run tests. -.RE -.sp -\fB\-\-no\-fail\-fast\fR -.RS 4 -Run all tests regardless of failure. Without this flag, Cargo will exit -after the first executable fails. The Rust test harness will run all tests -within the executable to completion, this flag only applies to the executable -as a whole. -.RE -.SS "Package Selection" -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. -.sp -The default members of a workspace can be set explicitly with the -\fBworkspace.default\-members\fR key in the root manifest. 
If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Test only the specified packages. See \fBcargo\-pkgid\fR(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern. -.RE -.sp -\fB\-\-workspace\fR -.RS 4 -Test all members in the workspace. -.RE -.sp -\fB\-\-all\fR -.RS 4 -Deprecated alias for \fB\-\-workspace\fR\&. -.RE -.sp -\fB\-\-exclude\fR \fISPEC\fR\&... -.RS 4 -Exclude the specified packages. Must be used in conjunction with the -\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports -common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern. -.RE -.SS "Target Selection" -When no target selection options are given, \fBcargo test\fR will build the -following targets of the selected packages: -.sp -.RS 4 -\h'-04'\(bu\h'+02'lib \[em] used to link with binaries, examples, integration tests, and doc tests -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'bins (only if integration tests are built and required features are -available) -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'examples \[em] to ensure they compile -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'lib as a unit test -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'bins as unit tests -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'integration tests -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'doc tests for the lib target -.RE -.sp -The default behavior can be changed by setting the \fBtest\fR flag for the target -in the manifest settings. Setting examples to \fBtest = true\fR will build and run -the example as a test. Setting targets to \fBtest = false\fR will stop them from -being tested by default. Target selection options that take a target by name -ignore the \fBtest\fR flag and will always test the given target. -.sp -Doc tests for libraries may be disabled by setting \fBdoctest = false\fR for the -library in the manifest. -.sp -Binary targets are automatically built if there is an integration test or -benchmark. This allows an integration test to execute the binary to exercise -and test its behavior. The \fBCARGO_BIN_EXE_\fR -\fIenvironment variable\fR -is set when the integration test is built so that it can use the -\fI\f(BIenv\fI macro\fR to locate the -executable. -.sp -Passing target selection flags will test only the specified -targets. -.sp -Note that \fB\-\-bin\fR, \fB\-\-example\fR, \fB\-\-test\fR and \fB\-\-bench\fR flags also -support common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your -shell accidentally expanding glob patterns before Cargo handles them, you must -use single quotes or double quotes around each glob pattern. -.sp -\fB\-\-lib\fR -.RS 4 -Test the package's library. -.RE -.sp -\fB\-\-bin\fR \fIname\fR\&... -.RS 4 -Test the specified binary. This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-bins\fR -.RS 4 -Test all binary targets. -.RE -.sp -\fB\-\-example\fR \fIname\fR\&... -.RS 4 -Test the specified example. 
This flag may be specified multiple times -and supports common Unix glob patterns. -.RE -.sp -\fB\-\-examples\fR -.RS 4 -Test all example targets. -.RE -.sp -\fB\-\-test\fR \fIname\fR\&... -.RS 4 -Test the specified integration test. This flag may be specified -multiple times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-tests\fR -.RS 4 -Test all targets in test mode that have the \fBtest = true\fR manifest -flag set. By default this includes the library and binaries built as -unittests, and integration tests. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -unittest, and once as a dependency for binaries, integration tests, etc.). -Targets may be enabled or disabled by setting the \fBtest\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-bench\fR \fIname\fR\&... -.RS 4 -Test the specified benchmark. This flag may be specified multiple -times and supports common Unix glob patterns. -.RE -.sp -\fB\-\-benches\fR -.RS 4 -Test all targets in benchmark mode that have the \fBbench = true\fR -manifest flag set. By default this includes the library and binaries built -as benchmarks, and bench targets. Be aware that this will also build any -required dependencies, so the lib target may be built twice (once as a -benchmark, and once as a dependency for binaries, benchmarks, etc.). -Targets may be enabled or disabled by setting the \fBbench\fR flag in the -manifest settings for the target. -.RE -.sp -\fB\-\-all\-targets\fR -.RS 4 -Test all targets. This is equivalent to specifying \fB\-\-lib \-\-bins \-\-tests \-\-benches \-\-examples\fR\&. -.RE -.sp -\fB\-\-doc\fR -.RS 4 -Test only the library's documentation. This cannot be mixed with other -target options. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Compilation Options" -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Test for the given architecture. The default is the host -architecture. The general format of the triple is -\fB\-\-\-\fR\&. Run \fBrustc \-\-print target\-list\fR for a -list of supported targets. -.sp -This may also be specified with the \fBbuild.target\fR -\fIconfig value\fR \&. -.sp -Note that specifying this flag makes Cargo run in a different mode where the -target artifacts are placed in a separate directory. See the -\fIbuild cache\fR documentation for more details. -.RE -.sp -\fB\-r\fR, -\fB\-\-release\fR -.RS 4 -Test optimized artifacts with the \fBrelease\fR profile. -See also the \fB\-\-profile\fR option for choosing a specific profile by name. -.RE -.sp -\fB\-\-profile\fR \fIname\fR -.RS 4 -Test with the given profile. -See the \fIthe reference\fR for more details on profiles. 
-.RE -.sp -\fB\-\-ignore\-rust\-version\fR -.RS 4 -Test the target even if the selected Rust compiler is older than the -required Rust version as configured in the project's \fBrust\-version\fR field. -.RE -.SS "Output Options" -.sp -\fB\-\-target\-dir\fR \fIdirectory\fR -.RS 4 -Directory for all generated artifacts and intermediate files. May also be -specified with the \fBCARGO_TARGET_DIR\fR environment variable, or the -\fBbuild.target\-dir\fR \fIconfig value\fR \&. -Defaults to \fBtarget\fR in the root of the workspace. -.RE -.SS "Display Options" -By default the Rust test harness hides output from test execution to keep -results readable. Test output can be recovered (e.g., for debugging) by passing -\fB\-\-nocapture\fR to the test binaries: -.sp -.RS 4 -.nf -cargo test \-\- \-\-nocapture -.fi -.RE -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-message\-format\fR \fIfmt\fR -.RS 4 -The output format for diagnostic messages. Can be specified multiple times -and consists of comma\-separated values. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBhuman\fR (default): Display in a human\-readable text format. Conflicts with -\fBshort\fR and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBshort\fR: Emit shorter, human\-readable text messages. Conflicts with \fBhuman\fR -and \fBjson\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\fR: Emit JSON messages to stdout. See -\fIthe reference\fR -for more details. Conflicts with \fBhuman\fR and \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-short\fR: Ensure the \fBrendered\fR field of JSON messages contains -the "short" rendering from rustc. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-diagnostic\-rendered\-ansi\fR: Ensure the \fBrendered\fR field of JSON messages -contains embedded ANSI color codes for respecting rustc's default color -scheme. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBjson\-render\-diagnostics\fR: Instruct Cargo to not include rustc diagnostics in -in JSON messages printed, but instead Cargo itself should render the -JSON diagnostics coming from rustc. Cargo's own JSON diagnostics and others -coming from rustc are still emitted. Cannot be used with \fBhuman\fR or \fBshort\fR\&. -.RE -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. 
If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SS "Miscellaneous Options" -The \fB\-\-jobs\fR argument affects the building of the test executable but does not -affect how many threads are used when running the tests. The Rust test harness -includes an option to control the number of threads used: -.sp -.RS 4 -.nf -cargo test \-j 2 \-\- \-\-test\-threads=2 -.fi -.RE -.sp -\fB\-j\fR \fIN\fR, -\fB\-\-jobs\fR \fIN\fR -.RS 4 -Number of parallel jobs to run. May also be specified with the -\fBbuild.jobs\fR \fIconfig value\fR \&. Defaults to -the number of CPUs. -.RE -.sp -\fB\-\-future\-incompat\-report\fR -.RS 4 -Displays a future\-incompat report for any future\-incompatible warnings -produced during execution of this command -.sp -See \fBcargo\-report\fR(1) -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
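(Illustrative aside before the original EXAMPLES: two further invocations composed only from flags documented above are

    cargo test --workspace --no-fail-fast
    cargo test --doc

the first runs every workspace member's tests to completion, the second runs only the library's documentation tests.)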
-.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Execute all the unit and integration tests of the current package: -.sp -.RS 4 -.nf -cargo test -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Run only tests whose names match against a filter string: -.sp -.RS 4 -.nf -cargo test name_filter -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Run only a specific test within a specific integration test: -.sp -.RS 4 -.nf -cargo test \-\-test int_test_name \-\- modname::test_name -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-bench\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-tree.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-tree.1 deleted file mode 100644 index c3bd42862..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-tree.1 +++ /dev/null @@ -1,471 +0,0 @@ -'\" t -.TH "CARGO\-TREE" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-tree \- Display a tree visualization of a dependency graph -.SH "SYNOPSIS" -\fBcargo tree\fR [\fIoptions\fR] -.SH "DESCRIPTION" -This command will display a tree of dependencies to the terminal. An example -of a simple project that depends on the "rand" package: -.sp -.RS 4 -.nf -myproject v0.1.0 (/myproject) -`\-\- rand v0.7.3 - |\-\- getrandom v0.1.14 - | |\-\- cfg\-if v0.1.10 - | `\-\- libc v0.2.68 - |\-\- libc v0.2.68 (*) - |\-\- rand_chacha v0.2.2 - | |\-\- ppv\-lite86 v0.2.6 - | `\-\- rand_core v0.5.1 - | `\-\- getrandom v0.1.14 (*) - `\-\- rand_core v0.5.1 (*) -[build\-dependencies] -`\-\- cc v1.0.50 -.fi -.RE -.sp -Packages marked with \fB(*)\fR have been "de\-duplicated". The dependencies for the -package have already been shown elsewhere in the graph, and so are not -repeated. Use the \fB\-\-no\-dedupe\fR option to repeat the duplicates. -.sp -The \fB\-e\fR flag can be used to select the dependency kinds to display. The -"features" kind changes the output to display the features enabled by -each dependency. For example, \fBcargo tree \-e features\fR: -.sp -.RS 4 -.nf -myproject v0.1.0 (/myproject) -`\-\- log feature "serde" - `\-\- log v0.4.8 - |\-\- serde v1.0.106 - `\-\- cfg\-if feature "default" - `\-\- cfg\-if v0.1.10 -.fi -.RE -.sp -In this tree, \fBmyproject\fR depends on \fBlog\fR with the \fBserde\fR feature. \fBlog\fR in -turn depends on \fBcfg\-if\fR with "default" features. When using \fB\-e features\fR it -can be helpful to use \fB\-i\fR flag to show how the features flow into a package. -See the examples below for more detail. -.SH "OPTIONS" -.SS "Tree Options" -.sp -\fB\-i\fR \fIspec\fR, -\fB\-\-invert\fR \fIspec\fR -.RS 4 -Show the reverse dependencies for the given package. This flag will invert -the tree and display the packages that depend on the given package. -.sp -Note that in a workspace, by default it will only display the package's -reverse dependencies inside the tree of the workspace member in the current -directory. The \fB\-\-workspace\fR flag can be used to extend it so that it will -show the package's reverse dependencies across the entire workspace. The \fB\-p\fR -flag can be used to display the package's reverse dependencies only with the -subtree of the package given to \fB\-p\fR\&. -.RE -.sp -\fB\-\-prune\fR \fIspec\fR -.RS 4 -Prune the given package from the display of the dependency tree. -.RE -.sp -\fB\-\-depth\fR \fIdepth\fR -.RS 4 -Maximum display depth of the dependency tree. A depth of 1 displays the direct -dependencies, for example. -.RE -.sp -\fB\-\-no\-dedupe\fR -.RS 4 -Do not de\-duplicate repeated dependencies. 
Usually, when a package has already -displayed its dependencies, further occurrences will not re\-display its -dependencies, and will include a \fB(*)\fR to indicate it has already been shown. -This flag will cause those duplicates to be repeated. -.RE -.sp -\fB\-d\fR, -\fB\-\-duplicates\fR -.RS 4 -Show only dependencies which come in multiple versions (implies \fB\-\-invert\fR). -When used with the \fB\-p\fR flag, only shows duplicates within the subtree of the -given package. -.sp -It can be beneficial for build times and executable sizes to avoid building -that same package multiple times. This flag can help identify the offending -packages. You can then investigate if the package that depends on the -duplicate with the older version can be updated to the newer version so that -only one instance is built. -.RE -.sp -\fB\-e\fR \fIkinds\fR, -\fB\-\-edges\fR \fIkinds\fR -.RS 4 -The dependency kinds to display. Takes a comma separated list of values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBall\fR \[em] Show all edge kinds. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnormal\fR \[em] Show normal dependencies. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBbuild\fR \[em] Show build dependencies. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBdev\fR \[em] Show development dependencies. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBfeatures\fR \[em] Show features enabled by each dependency. If this is the only -kind given, then it will automatically include the other dependency kinds. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBno\-normal\fR \[em] Do not include normal dependencies. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBno\-build\fR \[em] Do not include build dependencies. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBno\-dev\fR \[em] Do not include development dependencies. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBno\-proc\-macro\fR \[em] Do not include procedural macro dependencies. -.RE -.sp -The \fBnormal\fR, \fBbuild\fR, \fBdev\fR, and \fBall\fR dependency kinds cannot be mixed with -\fBno\-normal\fR, \fBno\-build\fR, or \fBno\-dev\fR dependency kinds. -.sp -The default is \fBnormal,build,dev\fR\&. -.RE -.sp -\fB\-\-target\fR \fItriple\fR -.RS 4 -Filter dependencies matching the given target\-triple. The default is the host -platform. Use the value \fBall\fR to include \fIall\fR targets. -.RE -.SS "Tree Formatting Options" -.sp -\fB\-\-charset\fR \fIcharset\fR -.RS 4 -Chooses the character set to use for the tree. Valid values are "utf8" or -"ascii". Default is "utf8". -.RE -.sp -\fB\-f\fR \fIformat\fR, -\fB\-\-format\fR \fIformat\fR -.RS 4 -Set the format string for each package. The default is "{p}". -.sp -This is an arbitrary string which will be used to display each package. The following -strings will be replaced with the corresponding value: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB{p}\fR \[em] The package name. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB{l}\fR \[em] The package license. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB{r}\fR \[em] The package repository URL. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB{f}\fR \[em] Comma\-separated list of package features that are enabled. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB{lib}\fR \[em] The name, as used in a \fBuse\fR statement, of the package's library. -.RE -.RE -.sp -\fB\-\-prefix\fR \fIprefix\fR -.RS 4 -Sets how each line is displayed. The \fIprefix\fR value can be one of: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBindent\fR (default) \[em] Shows each line indented as a tree. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBdepth\fR \[em] Show as a list, with the numeric depth printed before each entry. 
-.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnone\fR \[em] Show as a flat list. -.RE -.RE -.SS "Package Selection" -By default, when no package selection options are given, the packages selected -depend on the selected manifest file (based on the current working directory if -\fB\-\-manifest\-path\fR is not given). If the manifest is the root of a workspace then -the workspaces default members are selected, otherwise only the package defined -by the manifest will be selected. -.sp -The default members of a workspace can be set explicitly with the -\fBworkspace.default\-members\fR key in the root manifest. If this is not set, a -virtual workspace will include all workspace members (equivalent to passing -\fB\-\-workspace\fR), and a non\-virtual workspace will include only the root crate itself. -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Display only the specified packages. See \fBcargo\-pkgid\fR(1) for the -SPEC format. This flag may be specified multiple times and supports common Unix -glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell accidentally -expanding glob patterns before Cargo handles them, you must use single quotes or -double quotes around each pattern. -.RE -.sp -\fB\-\-workspace\fR -.RS 4 -Display all members in the workspace. -.RE -.sp -\fB\-\-exclude\fR \fISPEC\fR\&... -.RS 4 -Exclude the specified packages. Must be used in conjunction with the -\fB\-\-workspace\fR flag. This flag may be specified multiple times and supports -common Unix glob patterns like \fB*\fR, \fB?\fR and \fB[]\fR\&. However, to avoid your shell -accidentally expanding glob patterns before Cargo handles them, you must use -single quotes or double quotes around each pattern. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Feature Selection" -The feature flags allow you to control which features are enabled. When no -feature options are given, the \fBdefault\fR feature is activated for every -selected package. -.sp -See \fIthe features documentation\fR -for more details. -.sp -\fB\-\-features\fR \fIfeatures\fR -.RS 4 -Space or comma separated list of features to activate. 
Features of workspace -members may be enabled with \fBpackage\-name/feature\-name\fR syntax. This flag may -be specified multiple times, which enables all specified features. -.RE -.sp -\fB\-\-all\-features\fR -.RS 4 -Activate all available features of all selected packages. -.RE -.sp -\fB\-\-no\-default\-features\fR -.RS 4 -Do not activate the \fBdefault\fR feature of the selected packages. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Display the tree for the package in the current directory: -.sp -.RS 4 -.nf -cargo tree -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Display all the packages that depend on the \fBsyn\fR package: -.sp -.RS 4 -.nf -cargo tree \-i syn -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Show the features enabled on each package: -.sp -.RS 4 -.nf -cargo tree \-\-format "{p} {f}" -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 4.\h'+01'Show all packages that are built multiple times. This can happen if multiple -semver\-incompatible versions appear in the tree (like 1.0.0 and 2.0.0). -.sp -.RS 4 -.nf -cargo tree \-d -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 5.\h'+01'Explain why features are enabled for the \fBsyn\fR package: -.sp -.RS 4 -.nf -cargo tree \-e features \-i syn -.fi -.RE -.sp -The \fB\-e features\fR flag is used to show features. The \fB\-i\fR flag is used to -invert the graph so that it displays the packages that depend on \fBsyn\fR\&. 
An -example of what this would display: -.sp -.RS 4 -.nf -syn v1.0.17 -|\-\- syn feature "clone\-impls" -| `\-\- syn feature "default" -| `\-\- rustversion v1.0.2 -| `\-\- rustversion feature "default" -| `\-\- myproject v0.1.0 (/myproject) -| `\-\- myproject feature "default" (command\-line) -|\-\- syn feature "default" (*) -|\-\- syn feature "derive" -| `\-\- syn feature "default" (*) -|\-\- syn feature "full" -| `\-\- rustversion v1.0.2 (*) -|\-\- syn feature "parsing" -| `\-\- syn feature "default" (*) -|\-\- syn feature "printing" -| `\-\- syn feature "default" (*) -|\-\- syn feature "proc\-macro" -| `\-\- syn feature "default" (*) -`\-\- syn feature "quote" - |\-\- syn feature "printing" (*) - `\-\- syn feature "proc\-macro" (*) -.fi -.RE -.sp -To read this graph, you can follow the chain for each feature from the root -to see why it is included. For example, the "full" feature is added by the -\fBrustversion\fR crate which is included from \fBmyproject\fR (with the default -features), and \fBmyproject\fR is the package selected on the command\-line. All -of the other \fBsyn\fR features are added by the "default" feature ("quote" is -added by "printing" and "proc\-macro", both of which are default features). -.sp -If you're having difficulty cross\-referencing the de\-duplicated \fB(*)\fR -entries, try with the \fB\-\-no\-dedupe\fR flag to get the full output. -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-metadata\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-uninstall.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-uninstall.1 deleted file mode 100644 index 610537795..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-uninstall.1 +++ /dev/null @@ -1,141 +0,0 @@ -'\" t -.TH "CARGO\-UNINSTALL" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-uninstall \- Remove a Rust binary -.SH "SYNOPSIS" -\fBcargo uninstall\fR [\fIoptions\fR] [\fIspec\fR\&...] -.SH "DESCRIPTION" -This command removes a package installed with \fBcargo\-install\fR(1). The \fIspec\fR -argument is a package ID specification of the package to remove (see -\fBcargo\-pkgid\fR(1)). -.sp -By default all binaries are removed for a crate but the \fB\-\-bin\fR and -\fB\-\-example\fR flags can be used to only remove particular binaries. -.sp -The installation root is determined, in order of precedence: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB\-\-root\fR option -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBCARGO_INSTALL_ROOT\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBinstall.root\fR Cargo \fIconfig value\fR -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBCARGO_HOME\fR environment variable -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB$HOME/.cargo\fR -.RE -.SH "OPTIONS" -.SS "Install Options" -.sp -\fB\-p\fR, -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Package to uninstall. -.RE -.sp -\fB\-\-bin\fR \fIname\fR\&... -.RS 4 -Only uninstall the binary \fIname\fR\&. -.RE -.sp -\fB\-\-root\fR \fIdir\fR -.RS 4 -Directory to uninstall packages from. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. 
-.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Uninstall a previously installed package. -.sp -.RS 4 -.nf -cargo uninstall ripgrep -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-install\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-update.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-update.1 deleted file mode 100644 index c5c9e686f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-update.1 +++ /dev/null @@ -1,198 +0,0 @@ -'\" t -.TH "CARGO\-UPDATE" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-update \- Update dependencies as recorded in the local lock file -.SH "SYNOPSIS" -\fBcargo update\fR [\fIoptions\fR] -.SH "DESCRIPTION" -This command will update dependencies in the \fBCargo.lock\fR file to the latest -version. If the \fBCargo.lock\fR file does not exist, it will be created with the -latest available versions. -.SH "OPTIONS" -.SS "Update Options" -.sp -\fB\-p\fR \fIspec\fR\&..., -\fB\-\-package\fR \fIspec\fR\&... -.RS 4 -Update only the specified packages. This flag may be specified -multiple times. See \fBcargo\-pkgid\fR(1) for the SPEC format. -.sp -If packages are specified with the \fB\-p\fR flag, then a conservative update of -the lockfile will be performed. This means that only the dependency specified -by SPEC will be updated. Its transitive dependencies will be updated only if -SPEC cannot be updated without updating dependencies. All other dependencies -will remain locked at their currently recorded versions. -.sp -If \fB\-p\fR is not specified, all dependencies are updated. -.RE -.sp -\fB\-\-aggressive\fR -.RS 4 -When used with \fB\-p\fR, dependencies of \fIspec\fR are forced to update as well. -Cannot be used with \fB\-\-precise\fR\&. -.RE -.sp -\fB\-\-precise\fR \fIprecise\fR -.RS 4 -When used with \fB\-p\fR, allows you to specify a specific version number to set -the package to. If the package comes from a git repository, this can be a git -revision (such as a SHA hash or tag). -.RE -.sp -\fB\-w\fR, -\fB\-\-workspace\fR -.RS 4 -Attempt to update only packages defined in the workspace. Other packages -are updated only if they don't already exist in the lockfile. 
This -option is useful for updating \fBCargo.lock\fR after you've changed version -numbers in \fBCargo.toml\fR\&. -.RE -.sp -\fB\-\-dry\-run\fR -.RS 4 -Displays what would be updated, but doesn't actually write the lockfile. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. 
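(Illustrative aside before the original EXAMPLES: the -w/--workspace and --dry-run flags described above can be combined to preview a lockfile refresh after editing version numbers, without writing anything:

    cargo update -w --dry-run

)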
-.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Update all dependencies in the lockfile: -.sp -.RS 4 -.nf -cargo update -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Update only specific dependencies: -.sp -.RS 4 -.nf -cargo update \-p foo \-p bar -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Set a specific dependency to a specific version: -.sp -.RS 4 -.nf -cargo update \-p foo \-\-precise 1.2.3 -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-generate\-lockfile\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-vendor.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-vendor.1 deleted file mode 100644 index eeaf74046..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-vendor.1 +++ /dev/null @@ -1,189 +0,0 @@ -'\" t -.TH "CARGO\-VENDOR" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-vendor \- Vendor all dependencies locally -.SH "SYNOPSIS" -\fBcargo vendor\fR [\fIoptions\fR] [\fIpath\fR] -.SH "DESCRIPTION" -This cargo subcommand will vendor all crates.io and git dependencies for a -project into the specified directory at \fB\fR\&. After this command completes -the vendor directory specified by \fB\fR will contain all remote sources from -dependencies specified. Additional manifests beyond the default one can be -specified with the \fB\-s\fR option. -.sp -The \fBcargo vendor\fR command will also print out the configuration necessary -to use the vendored sources, which you will need to add to \fB\&.cargo/config.toml\fR\&. -.SH "OPTIONS" -.SS "Vendor Options" -.sp -\fB\-s\fR \fImanifest\fR, -\fB\-\-sync\fR \fImanifest\fR -.RS 4 -Specify extra \fBCargo.toml\fR manifests to workspaces which should also be -vendored and synced to the output. -.RE -.sp -\fB\-\-no\-delete\fR -.RS 4 -Don't delete the "vendor" directory when vendoring, but rather keep all -existing contents of the vendor directory -.RE -.sp -\fB\-\-respect\-source\-config\fR -.RS 4 -Instead of ignoring \fB[source]\fR configuration by default in \fB\&.cargo/config.toml\fR -read it and use it when downloading crates from crates.io, for example -.RE -.sp -\fB\-\-versioned\-dirs\fR -.RS 4 -Normally versions are only added to disambiguate multiple versions of the -same package. This option causes all directories in the "vendor" directory -to be versioned, which makes it easier to track the history of vendored -packages over time, and can help with the performance of re\-vendoring when -only a subset of the packages have changed. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. 
-.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Vendor all dependencies into a local "vendor" folder -.sp -.RS 4 -.nf -cargo vendor -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Vendor all dependencies into a local "third\-party/vendor" folder -.sp -.RS 4 -.nf -cargo vendor third\-party/vendor -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Vendor the current workspace as well as another to "vendor" -.sp -.RS 4 -.nf -cargo vendor \-s ../path/to/Cargo.toml -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-verify-project.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-verify-project.1 deleted file mode 100644 index 000ccad92..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-verify-project.1 +++ /dev/null @@ -1,148 +0,0 @@ -'\" t -.TH "CARGO\-VERIFY\-PROJECT" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-verify\-project \- Check correctness of crate manifest -.SH "SYNOPSIS" -\fBcargo verify\-project\fR [\fIoptions\fR] -.SH "DESCRIPTION" -This command will parse the local manifest and check its validity. It emits a -JSON object with the result. 
A successful validation will display: -.sp -.RS 4 -.nf -{"success":"true"} -.fi -.RE -.sp -An invalid workspace will display: -.sp -.RS 4 -.nf -{"invalid":"human\-readable error message"} -.fi -.RE -.SH "OPTIONS" -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Manifest Options" -.sp -\fB\-\-manifest\-path\fR \fIpath\fR -.RS 4 -Path to the \fBCargo.toml\fR file. By default, Cargo searches for the -\fBCargo.toml\fR file in the current directory or any parent directory. -.RE -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: The workspace is OK. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB1\fR: The workspace is invalid. 
-.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Check the current workspace for errors: -.sp -.RS 4 -.nf -cargo verify\-project -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-package\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-version.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-version.1 deleted file mode 100644 index cabd8b2d2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-version.1 +++ /dev/null @@ -1,52 +0,0 @@ -'\" t -.TH "CARGO\-VERSION" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-version \- Show version information -.SH "SYNOPSIS" -\fBcargo version\fR [\fIoptions\fR] -.SH "DESCRIPTION" -Displays the version of Cargo. -.SH "OPTIONS" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Display additional version information. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Display the version: -.sp -.RS 4 -.nf -cargo version -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'The version is also available via flags: -.sp -.RS 4 -.nf -cargo \-\-version -cargo \-V -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Display extra version information: -.sp -.RS 4 -.nf -cargo \-Vv -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-yank.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-yank.1 deleted file mode 100644 index 053fbbdca..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo-yank.1 +++ /dev/null @@ -1,146 +0,0 @@ -'\" t -.TH "CARGO\-YANK" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo\-yank \- Remove a pushed crate from the index -.SH "SYNOPSIS" -\fBcargo yank\fR [\fIoptions\fR] \fB\-\-vers\fR \fIversion\fR [\fIcrate\fR] -.SH "DESCRIPTION" -The yank command removes a previously published crate's version from the -server's index. This command does not delete any data, and the crate will -still be available for download via the registry's download link. -.sp -Note that existing crates locked to a yanked version will still be able to -download the yanked version to use it. Cargo will, however, not allow any new -crates to be locked to any yanked version. -.sp -This command requires you to be authenticated with either the \fB\-\-token\fR option -or using \fBcargo\-login\fR(1). -.sp -If the crate name is not specified, it will use the package name from the -current directory. -.SH "OPTIONS" -.SS "Yank Options" -.sp -\fB\-\-vers\fR \fIversion\fR -.RS 4 -The version to yank or un\-yank. -.RE -.sp -\fB\-\-undo\fR -.RS 4 -Undo a yank, putting a version back into the index. -.RE -.sp -\fB\-\-token\fR \fItoken\fR -.RS 4 -API token to use when authenticating. This overrides the token stored in -the credentials file (which is created by \fBcargo\-login\fR(1)). -.sp -\fICargo config\fR environment variables can be -used to override the tokens stored in the credentials file. The token for -crates.io may be specified with the \fBCARGO_REGISTRY_TOKEN\fR environment -variable. Tokens for other registries may be specified with environment -variables of the form \fBCARGO_REGISTRIES_NAME_TOKEN\fR where \fBNAME\fR is the name -of the registry in all capital letters. -.RE -.sp -\fB\-\-index\fR \fIindex\fR -.RS 4 -The URL of the registry index to use. -.RE -.sp -\fB\-\-registry\fR \fIregistry\fR -.RS 4 -Name of the registry to use. Registry names are defined in \fICargo config -files\fR \&. 
If not specified, the default registry is used, -which is defined by the \fBregistry.default\fR config key which defaults to -\fBcrates\-io\fR\&. -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. -.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Yank a crate from the index: -.sp -.RS 4 -.nf -cargo yank \-\-vers 1.0.7 foo -.fi -.RE -.RE -.SH "SEE ALSO" -\fBcargo\fR(1), \fBcargo\-login\fR(1), \fBcargo\-publish\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo.1 b/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo.1 deleted file mode 100644 index 8f1616957..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/src/etc/man/cargo.1 +++ /dev/null @@ -1,372 +0,0 @@ -'\" t -.TH "CARGO" "1" -.nh -.ad l -.ss \n[.ss] 0 -.SH "NAME" -cargo \- The Rust package manager -.SH "SYNOPSIS" -\fBcargo\fR [\fIoptions\fR] \fIcommand\fR [\fIargs\fR] -.br -\fBcargo\fR [\fIoptions\fR] \fB\-\-version\fR -.br -\fBcargo\fR [\fIoptions\fR] \fB\-\-list\fR -.br -\fBcargo\fR [\fIoptions\fR] \fB\-\-help\fR -.br -\fBcargo\fR [\fIoptions\fR] \fB\-\-explain\fR \fIcode\fR -.SH "DESCRIPTION" -This program is a package manager and build tool for the Rust language, -available at \&. -.SH "COMMANDS" -.SS "Build Commands" -\fBcargo\-bench\fR(1) -.br -\ \ \ \ Execute benchmarks of a package. -.sp -\fBcargo\-build\fR(1) -.br -\ \ \ \ Compile a package. -.sp -\fBcargo\-check\fR(1) -.br -\ \ \ \ Check a local package and all of its dependencies for errors. -.sp -\fBcargo\-clean\fR(1) -.br -\ \ \ \ Remove artifacts that Cargo has generated in the past. -.sp -\fBcargo\-doc\fR(1) -.br -\ \ \ \ Build a package's documentation. -.sp -\fBcargo\-fetch\fR(1) -.br -\ \ \ \ Fetch dependencies of a package from the network. -.sp -\fBcargo\-fix\fR(1) -.br -\ \ \ \ Automatically fix lint warnings reported by rustc. 
-.sp -\fBcargo\-run\fR(1) -.br -\ \ \ \ Run a binary or example of the local package. -.sp -\fBcargo\-rustc\fR(1) -.br -\ \ \ \ Compile a package, and pass extra options to the compiler. -.sp -\fBcargo\-rustdoc\fR(1) -.br -\ \ \ \ Build a package's documentation, using specified custom flags. -.sp -\fBcargo\-test\fR(1) -.br -\ \ \ \ Execute unit and integration tests of a package. -.SS "Manifest Commands" -\fBcargo\-generate\-lockfile\fR(1) -.br -\ \ \ \ Generate \fBCargo.lock\fR for a project. -.sp -\fBcargo\-locate\-project\fR(1) -.br -\ \ \ \ Print a JSON representation of a \fBCargo.toml\fR file's location. -.sp -\fBcargo\-metadata\fR(1) -.br -\ \ \ \ Output the resolved dependencies of a package in machine\-readable format. -.sp -\fBcargo\-pkgid\fR(1) -.br -\ \ \ \ Print a fully qualified package specification. -.sp -\fBcargo\-tree\fR(1) -.br -\ \ \ \ Display a tree visualization of a dependency graph. -.sp -\fBcargo\-update\fR(1) -.br -\ \ \ \ Update dependencies as recorded in the local lock file. -.sp -\fBcargo\-vendor\fR(1) -.br -\ \ \ \ Vendor all dependencies locally. -.sp -\fBcargo\-verify\-project\fR(1) -.br -\ \ \ \ Check correctness of crate manifest. -.SS "Package Commands" -\fBcargo\-init\fR(1) -.br -\ \ \ \ Create a new Cargo package in an existing directory. -.sp -\fBcargo\-install\fR(1) -.br -\ \ \ \ Build and install a Rust binary. -.sp -\fBcargo\-new\fR(1) -.br -\ \ \ \ Create a new Cargo package. -.sp -\fBcargo\-search\fR(1) -.br -\ \ \ \ Search packages in crates.io. -.sp -\fBcargo\-uninstall\fR(1) -.br -\ \ \ \ Remove a Rust binary. -.SS "Publishing Commands" -\fBcargo\-login\fR(1) -.br -\ \ \ \ Save an API token from the registry locally. -.sp -\fBcargo\-owner\fR(1) -.br -\ \ \ \ Manage the owners of a crate on the registry. -.sp -\fBcargo\-package\fR(1) -.br -\ \ \ \ Assemble the local package into a distributable tarball. -.sp -\fBcargo\-publish\fR(1) -.br -\ \ \ \ Upload a package to the registry. -.sp -\fBcargo\-yank\fR(1) -.br -\ \ \ \ Remove a pushed crate from the index. -.SS "General Commands" -\fBcargo\-help\fR(1) -.br -\ \ \ \ Display help information about Cargo. -.sp -\fBcargo\-version\fR(1) -.br -\ \ \ \ Show version information. -.SH "OPTIONS" -.SS "Special Options" -.sp -\fB\-V\fR, -\fB\-\-version\fR -.RS 4 -Print version info and exit. If used with \fB\-\-verbose\fR, prints extra -information. -.RE -.sp -\fB\-\-list\fR -.RS 4 -List all installed Cargo subcommands. If used with \fB\-\-verbose\fR, prints extra -information. -.RE -.sp -\fB\-\-explain\fR \fIcode\fR -.RS 4 -Run \fBrustc \-\-explain CODE\fR which will print out a detailed explanation of an -error message (for example, \fBE0004\fR). -.RE -.SS "Display Options" -.sp -\fB\-v\fR, -\fB\-\-verbose\fR -.RS 4 -Use verbose output. May be specified twice for "very verbose" output which -includes extra output such as dependency warnings and build script output. -May also be specified with the \fBterm.verbose\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-q\fR, -\fB\-\-quiet\fR -.RS 4 -Do not print cargo log messages. -May also be specified with the \fBterm.quiet\fR -\fIconfig value\fR \&. -.RE -.sp -\fB\-\-color\fR \fIwhen\fR -.RS 4 -Control when colored output is used. Valid values: -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBauto\fR (default): Automatically detect if color support is available on the -terminal. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBalways\fR: Always display colors. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fBnever\fR: Never display colors. 
-.RE -.sp -May also be specified with the \fBterm.color\fR -\fIconfig value\fR \&. -.RE -.SS "Manifest Options" -.sp -\fB\-\-frozen\fR, -\fB\-\-locked\fR -.RS 4 -Either of these flags requires that the \fBCargo.lock\fR file is -up\-to\-date. If the lock file is missing, or it needs to be updated, Cargo will -exit with an error. The \fB\-\-frozen\fR flag also prevents Cargo from -attempting to access the network to determine if it is out\-of\-date. -.sp -These may be used in environments where you want to assert that the -\fBCargo.lock\fR file is up\-to\-date (such as a CI build) or want to avoid network -access. -.RE -.sp -\fB\-\-offline\fR -.RS 4 -Prevents Cargo from accessing the network for any reason. Without this -flag, Cargo will stop with an error if it needs to access the network and -the network is not available. With this flag, Cargo will attempt to -proceed without the network if possible. -.sp -Beware that this may result in different dependency resolution than online -mode. Cargo will restrict itself to crates that are downloaded locally, even -if there might be a newer version as indicated in the local copy of the index. -See the \fBcargo\-fetch\fR(1) command to download dependencies before going -offline. -.sp -May also be specified with the \fBnet.offline\fR \fIconfig value\fR \&. -.RE -.SS "Common Options" -.sp -\fB+\fR\fItoolchain\fR -.RS 4 -If Cargo has been installed with rustup, and the first argument to \fBcargo\fR -begins with \fB+\fR, it will be interpreted as a rustup toolchain name (such -as \fB+stable\fR or \fB+nightly\fR). -See the \fIrustup documentation\fR -for more information about how toolchain overrides work. -.RE -.sp -\fB\-h\fR, -\fB\-\-help\fR -.RS 4 -Prints help information. -.RE -.sp -\fB\-Z\fR \fIflag\fR -.RS 4 -Unstable (nightly\-only) flags to Cargo. Run \fBcargo \-Z help\fR for details. -.RE -.SH "ENVIRONMENT" -See \fIthe reference\fR for -details on environment variables that Cargo reads. -.SH "EXIT STATUS" -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB0\fR: Cargo succeeded. -.RE -.sp -.RS 4 -\h'-04'\(bu\h'+02'\fB101\fR: Cargo failed to complete. -.RE -.SH "FILES" -\fB~/.cargo/\fR -.br -\ \ \ \ Default location for Cargo's "home" directory where it -stores various files. The location can be changed with the \fBCARGO_HOME\fR -environment variable. -.sp -\fB$CARGO_HOME/bin/\fR -.br -\ \ \ \ Binaries installed by \fBcargo\-install\fR(1) will be located here. If using -\fIrustup\fR , executables distributed with Rust are also located here. -.sp -\fB$CARGO_HOME/config.toml\fR -.br -\ \ \ \ The global configuration file. See \fIthe reference\fR -for more information about configuration files. -.sp -\fB\&.cargo/config.toml\fR -.br -\ \ \ \ Cargo automatically searches for a file named \fB\&.cargo/config.toml\fR in the -current directory, and all parent directories. These configuration files -will be merged with the global configuration file. -.sp -\fB$CARGO_HOME/credentials.toml\fR -.br -\ \ \ \ Private authentication information for logging in to a registry. -.sp -\fB$CARGO_HOME/registry/\fR -.br -\ \ \ \ This directory contains cached downloads of the registry index and any -downloaded dependencies. -.sp -\fB$CARGO_HOME/git/\fR -.br -\ \ \ \ This directory contains cached downloads of git dependencies. -.sp -Please note that the internal structure of the \fB$CARGO_HOME\fR directory is not -stable yet and may be subject to change. 
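The FILES section above describes Cargo's configuration lookup: a `.cargo/config.toml` found in the current directory or any parent directory is merged with the global `$CARGO_HOME/config.toml`, and `CARGO_HOME` defaults to `~/.cargo/`. The sketch below only illustrates that lookup order; it is not Cargo's actual implementation, and the helper name and the `HOME` fallback are simplifications invented for the example.

```rust
use std::env;
use std::path::{Path, PathBuf};

/// Hypothetical helper (not Cargo's real logic): collect the configuration
/// files the FILES section describes, in discovery order -- a
/// `.cargo/config.toml` in the current directory and each parent directory,
/// followed by the global `$CARGO_HOME/config.toml`.
fn candidate_config_files(cwd: &Path) -> Vec<PathBuf> {
    let mut found = Vec::new();
    // `ancestors()` yields `cwd` itself, then each parent directory in turn.
    for dir in cwd.ancestors() {
        let local = dir.join(".cargo").join("config.toml");
        if local.is_file() {
            found.push(local);
        }
    }
    // Global configuration file; `CARGO_HOME` overrides the default `~/.cargo/`.
    // Falling back to `$HOME/.cargo` is a simplification for this sketch.
    let cargo_home = env::var_os("CARGO_HOME")
        .map(PathBuf::from)
        .or_else(|| env::var_os("HOME").map(|h| PathBuf::from(h).join(".cargo")));
    if let Some(home) = cargo_home {
        let global = home.join("config.toml");
        if global.is_file() {
            found.push(global);
        }
    }
    found
}

fn main() {
    let cwd = env::current_dir().expect("could not read current directory");
    for path in candidate_config_files(&cwd) {
        println!("would merge: {}", path.display());
    }
}
```

The sketch only lists which files would be considered; the actual merging of their values is left to Cargo.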
-.SH "EXAMPLES" -.sp -.RS 4 -\h'-04' 1.\h'+01'Build a local package and all of its dependencies: -.sp -.RS 4 -.nf -cargo build -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 2.\h'+01'Build a package with optimizations: -.sp -.RS 4 -.nf -cargo build \-\-release -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 3.\h'+01'Run tests for a cross\-compiled target: -.sp -.RS 4 -.nf -cargo test \-\-target i686\-unknown\-linux\-gnu -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 4.\h'+01'Create a new package that builds an executable: -.sp -.RS 4 -.nf -cargo new foobar -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 5.\h'+01'Create a package in the current directory: -.sp -.RS 4 -.nf -mkdir foo && cd foo -cargo init . -.fi -.RE -.RE -.sp -.RS 4 -\h'-04' 6.\h'+01'Learn about a command's options and usage: -.sp -.RS 4 -.nf -cargo help clean -.fi -.RE -.RE -.SH "BUGS" -See for issues. -.SH "SEE ALSO" -\fBrustc\fR(1), \fBrustdoc\fR(1) diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/build-std/main.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/build-std/main.rs deleted file mode 100644 index c1355b317..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/build-std/main.rs +++ /dev/null @@ -1,229 +0,0 @@ -//! A test suite for `-Zbuild-std` which is much more expensive than the -//! standard test suite. -//! -//! This test suite attempts to perform a full integration test where we -//! actually compile the standard library from source (like the real one) and -//! the various tests associated with that. -//! -//! YOU SHOULD IDEALLY NOT WRITE TESTS HERE. -//! -//! If possible, use `tests/testsuite/standard_lib.rs` instead. That uses a -//! 'mock' sysroot which is much faster to compile. The tests here are -//! extremely intensive and are only intended to run on CI and are theoretically -//! not catching any regressions that `tests/testsuite/standard_lib.rs` isn't -//! already catching. -//! -//! All tests here should use `#[cargo_test(build_std)]` to indicate that -//! boilerplate should be generated to require the nightly toolchain and the -//! `CARGO_RUN_BUILD_STD_TESTS` env var to be set to actually run these tests. -//! Otherwise the tests are skipped. 
- -use cargo_test_support::*; -use std::env; -use std::path::Path; - -fn enable_build_std(e: &mut Execs, arg: Option<&str>) { - e.env_remove("CARGO_HOME"); - e.env_remove("HOME"); - - // And finally actually enable `build-std` for now - let arg = match arg { - Some(s) => format!("-Zbuild-std={}", s), - None => "-Zbuild-std".to_string(), - }; - e.arg(arg); - e.masquerade_as_nightly_cargo(); -} - -// Helper methods used in the tests below -trait BuildStd: Sized { - fn build_std(&mut self) -> &mut Self; - fn build_std_arg(&mut self, arg: &str) -> &mut Self; - fn target_host(&mut self) -> &mut Self; -} - -impl BuildStd for Execs { - fn build_std(&mut self) -> &mut Self { - enable_build_std(self, None); - self - } - - fn build_std_arg(&mut self, arg: &str) -> &mut Self { - enable_build_std(self, Some(arg)); - self - } - - fn target_host(&mut self) -> &mut Self { - self.arg("--target").arg(rustc_host()); - self - } -} - -#[cargo_test(build_std)] -fn basic() { - let p = project() - .file( - "src/main.rs", - " - fn main() { - foo::f(); - } - - #[test] - fn smoke_bin_unit() { - foo::f(); - } - ", - ) - .file( - "src/lib.rs", - " - extern crate alloc; - extern crate proc_macro; - - /// ``` - /// foo::f(); - /// ``` - pub fn f() { - } - - #[test] - fn smoke_lib_unit() { - f(); - } - ", - ) - .file( - "tests/smoke.rs", - " - #[test] - fn smoke_integration() { - foo::f(); - } - ", - ) - .build(); - - p.cargo("check").build_std().target_host().run(); - p.cargo("build") - .build_std() - .target_host() - // Importantly, this should not say [UPDATING] - // There have been multiple bugs where every build triggers and update. - .with_stderr( - "[COMPILING] foo v0.0.1 [..]\n\ - [FINISHED] dev [..]", - ) - .run(); - p.cargo("run").build_std().target_host().run(); - p.cargo("test").build_std().target_host().run(); - - // Check for hack that removes dylibs. - let deps_dir = Path::new("target") - .join(rustc_host()) - .join("debug") - .join("deps"); - assert!(p.glob(deps_dir.join("*.rlib")).count() > 0); - assert_eq!(p.glob(deps_dir.join("*.dylib")).count(), 0); -} - -#[cargo_test(build_std)] -fn cross_custom() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [target.custom-target.dependencies] - dep = { path = "dep" } - "#, - ) - .file( - "src/lib.rs", - "#![no_std] pub fn f() -> u32 { dep::answer() }", - ) - .file("dep/Cargo.toml", &basic_manifest("dep", "0.1.0")) - .file("dep/src/lib.rs", "#![no_std] pub fn answer() -> u32 { 42 }") - .file( - "custom-target.json", - r#" - { - "llvm-target": "x86_64-unknown-none-gnu", - "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128", - "arch": "x86_64", - "target-endian": "little", - "target-pointer-width": "64", - "target-c-int-width": "32", - "os": "none", - "linker-flavor": "ld.lld" - } - "#, - ) - .build(); - - p.cargo("build --target custom-target.json -v") - .build_std_arg("core") - .run(); -} - -#[cargo_test(build_std)] -fn custom_test_framework() { - let p = project() - .file( - "src/lib.rs", - r#" - #![no_std] - #![cfg_attr(test, no_main)] - #![feature(custom_test_frameworks)] - #![test_runner(crate::test_runner)] - - pub fn test_runner(_tests: &[&dyn Fn()]) {} - - #[panic_handler] - fn panic(_info: &core::panic::PanicInfo) -> ! 
{ - loop {} - } - "#, - ) - .file( - "target.json", - r#" - { - "llvm-target": "x86_64-unknown-none-gnu", - "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128", - "arch": "x86_64", - "target-endian": "little", - "target-pointer-width": "64", - "target-c-int-width": "32", - "os": "none", - "linker-flavor": "ld.lld", - "linker": "rust-lld", - "executables": true, - "panic-strategy": "abort" - } - "#, - ) - .build(); - - // This is a bit of a hack to use the rust-lld that ships with most toolchains. - let sysroot = paths::sysroot(); - let sysroot = Path::new(&sysroot); - let sysroot_bin = sysroot - .join("lib") - .join("rustlib") - .join(rustc_host()) - .join("bin"); - let path = env::var_os("PATH").unwrap_or_default(); - let mut paths = env::split_paths(&path).collect::>(); - paths.insert(0, sysroot_bin); - let new_path = env::join_paths(paths).unwrap(); - - p.cargo("test --target target.json --no-run -v") - .env("PATH", new_path) - .build_std_arg("core") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/internal.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/internal.rs deleted file mode 100644 index c42cfa8f0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/internal.rs +++ /dev/null @@ -1,107 +0,0 @@ -//! Tests for internal code checks. - -#![allow(clippy::all)] - -use std::fs; - -#[test] -fn check_forbidden_code() { - // Do not use certain macros, functions, etc. - if !cargo_util::is_ci() { - // Only check these on CI, otherwise it could be annoying. - use std::io::Write; - writeln!( - std::io::stderr(), - "\nSkipping check_forbidden_code test, set CI=1 to enable" - ) - .unwrap(); - return; - } - let root_path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("src"); - for entry in walkdir::WalkDir::new(&root_path) - .into_iter() - .filter_entry(|e| e.path() != root_path.join("doc")) - .filter_map(|e| e.ok()) - { - let path = entry.path(); - if !entry - .file_name() - .to_str() - .map(|s| s.ends_with(".rs")) - .unwrap_or(false) - { - continue; - } - eprintln!("checking {}", path.display()); - let c = fs::read_to_string(path).unwrap(); - for (line_index, line) in c.lines().enumerate() { - if line.trim().starts_with("//") { - continue; - } - if line_has_print(line) { - if entry.file_name().to_str().unwrap() == "cargo_new.rs" && line.contains("Hello") { - // An exception. - continue; - } - panic!( - "found print macro in {}:{}\n\n{}\n\n\ - print! macros should not be used in Cargo because they can panic.\n\ - Use one of the drop_print macros instead.\n\ - ", - path.display(), - line_index, - line - ); - } - if line_has_macro(line, "dbg") { - panic!( - "found dbg! macro in {}:{}\n\n{}\n\n\ - dbg! 
should not be used outside of debugging.", - path.display(), - line_index, - line - ); - } - } - } -} - -fn line_has_print(line: &str) -> bool { - line_has_macro(line, "print") - || line_has_macro(line, "eprint") - || line_has_macro(line, "println") - || line_has_macro(line, "eprintln") -} - -#[test] -fn line_has_print_works() { - assert!(line_has_print("print!")); - assert!(line_has_print("println!")); - assert!(line_has_print("eprint!")); - assert!(line_has_print("eprintln!")); - assert!(line_has_print("(print!(\"hi!\"))")); - assert!(!line_has_print("print")); - assert!(!line_has_print("i like to print things")); - assert!(!line_has_print("drop_print!")); - assert!(!line_has_print("drop_println!")); - assert!(!line_has_print("drop_eprint!")); - assert!(!line_has_print("drop_eprintln!")); -} - -fn line_has_macro(line: &str, mac: &str) -> bool { - for (i, _) in line.match_indices(mac) { - if line.get(i + mac.len()..i + mac.len() + 1) != Some("!") { - continue; - } - if i == 0 { - return true; - } - // Check for identifier boundary start. - let prev1 = line.get(i - 1..i).unwrap().chars().next().unwrap(); - if prev1.is_alphanumeric() || prev1 == '_' { - continue; - } - return true; - } - false -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/advanced_env.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/advanced_env.rs deleted file mode 100644 index 64d02809f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/advanced_env.rs +++ /dev/null @@ -1,38 +0,0 @@ -//! -Zadvanced-env tests - -use cargo_test_support::{paths, project, registry::Package}; - -#[cargo_test] -// I don't know why, but `Command` forces all env keys to be upper case on -// Windows. Seems questionable, since I think Windows is case-preserving. -#[cfg_attr(windows, ignore)] -fn source_config_env() { - // Try to define [source] with environment variables. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - somedep = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - Package::new("somedep", "1.0.0") - .local(true) - .file("src/lib.rs", "") - .publish(); - - let path = paths::root().join("registry"); - - p.cargo("check -Zadvanced-env") - .masquerade_as_nightly_cargo() - .env("CARGO_SOURCE_crates-io_REPLACE_WITH", "my-local-source") - .env("CARGO_SOURCE_my-local-source_LOCAL_REGISTRY", path) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/alt_registry.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/alt_registry.rs deleted file mode 100644 index b0580d57e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/alt_registry.rs +++ /dev/null @@ -1,1325 +0,0 @@ -//! Tests for alternative registries. - -use cargo::util::IntoUrl; -use cargo_test_support::publish::validate_alt_upload; -use cargo_test_support::registry::{self, Package}; -use cargo_test_support::{basic_manifest, git, paths, project}; -use std::fs; - -#[cargo_test] -fn depend_on_alt_registry() { - registry::alt_init(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "0.0.1" - registry = "alternative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").alternative(true).publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `alternative` index -[DOWNLOADING] crates ... 
-[DOWNLOADED] bar v0.0.1 (registry `alternative`) -[COMPILING] bar v0.0.1 (registry `alternative`) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); - - p.cargo("clean").run(); - - // Don't download a second time - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 (registry `alternative`) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn depend_on_alt_registry_depends_on_same_registry_no_index() { - registry::alt_init(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "0.0.1" - registry = "alternative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("baz", "0.0.1").alternative(true).publish(); - Package::new("bar", "0.0.1") - .registry_dep("baz", "0.0.1") - .alternative(true) - .publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `alternative` index -[DOWNLOADING] crates ... -[DOWNLOADED] [..] v0.0.1 (registry `alternative`) -[DOWNLOADED] [..] v0.0.1 (registry `alternative`) -[COMPILING] baz v0.0.1 (registry `alternative`) -[COMPILING] bar v0.0.1 (registry `alternative`) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn depend_on_alt_registry_depends_on_same_registry() { - registry::alt_init(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "0.0.1" - registry = "alternative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("baz", "0.0.1").alternative(true).publish(); - Package::new("bar", "0.0.1") - .registry_dep("baz", "0.0.1") - .alternative(true) - .publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `alternative` index -[DOWNLOADING] crates ... -[DOWNLOADED] [..] v0.0.1 (registry `alternative`) -[DOWNLOADED] [..] v0.0.1 (registry `alternative`) -[COMPILING] baz v0.0.1 (registry `alternative`) -[COMPILING] bar v0.0.1 (registry `alternative`) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn depend_on_alt_registry_depends_on_crates_io() { - registry::alt_init(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "0.0.1" - registry = "alternative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("baz", "0.0.1").publish(); - Package::new("bar", "0.0.1") - .dep("baz", "0.0.1") - .alternative(true) - .publish(); - - p.cargo("build") - .with_stderr_unordered( - "\ -[UPDATING] `alternative` index -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... 
-[DOWNLOADED] baz v0.0.1 (registry `dummy-registry`) -[DOWNLOADED] bar v0.0.1 (registry `alternative`) -[COMPILING] baz v0.0.1 -[COMPILING] bar v0.0.1 (registry `alternative`) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn registry_and_path_dep_works() { - registry::alt_init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - registry = "alternative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn registry_incompatible_with_git() { - registry::alt_init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - git = "" - registry = "alternative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - " dependency (bar) specification is ambiguous. \ - Only one of `git` or `registry` is allowed.", - ) - .run(); -} - -#[cargo_test] -fn cannot_publish_to_crates_io_with_registry_dependency() { - registry::alt_init(); - let fakeio_path = paths::root().join("fake.io"); - let fakeio_url = fakeio_path.into_url().unwrap(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - [dependencies.bar] - version = "0.0.1" - registry = "alternative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - &format!( - r#" - [registries.fakeio] - index = "{}" - "#, - fakeio_url - ), - ) - .build(); - - Package::new("bar", "0.0.1").alternative(true).publish(); - - // Since this can't really call plain `publish` without fetching the real - // crates.io index, create a fake one that points to the real crates.io. 
- git::repo(&fakeio_path) - .file( - "config.json", - r#" - {"dl": "https://crates.io/api/v1/crates", "api": "https://crates.io"} - "#, - ) - .build(); - - // Login so that we have the token available - p.cargo("login --registry fakeio TOKEN").run(); - - p.cargo("publish --registry fakeio") - .with_status(101) - .with_stderr_contains("[ERROR] crates cannot be published to crates.io[..]") - .run(); - - p.cargo("publish --token sekrit --index") - .arg(fakeio_url.to_string()) - .with_status(101) - .with_stderr_contains("[ERROR] crates cannot be published to crates.io[..]") - .run(); -} - -#[cargo_test] -fn publish_with_registry_dependency() { - registry::alt_init(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "0.0.1" - registry = "alternative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").alternative(true).publish(); - - // Login so that we have the token available - p.cargo("login --registry alternative TOKEN").run(); - - p.cargo("publish --registry alternative").run(); - - validate_alt_upload( - r#"{ - "authors": [], - "badges": {}, - "categories": [], - "deps": [ - { - "default_features": true, - "features": [], - "kind": "normal", - "name": "bar", - "optional": false, - "target": null, - "version_req": "^0.0.1" - } - ], - "description": null, - "documentation": null, - "features": {}, - "homepage": null, - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": null, - "homepage": null, - "documentation": null, - "vers": "0.0.1" - }"#, - "foo-0.0.1.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - ); -} - -#[cargo_test] -fn alt_registry_and_crates_io_deps() { - registry::alt_init(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - crates_io_dep = "0.0.1" - - [dependencies.alt_reg_dep] - version = "0.1.0" - registry = "alternative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("crates_io_dep", "0.0.1").publish(); - Package::new("alt_reg_dep", "0.1.0") - .alternative(true) - .publish(); - - p.cargo("build") - .with_stderr_unordered( - "\ -[UPDATING] `alternative` index -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... 
-[DOWNLOADED] crates_io_dep v0.0.1 (registry `dummy-registry`) -[DOWNLOADED] alt_reg_dep v0.1.0 (registry `alternative`) -[COMPILING] alt_reg_dep v0.1.0 (registry `alternative`) -[COMPILING] crates_io_dep v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn block_publish_due_to_no_token() { - registry::alt_init(); - let p = project().file("src/lib.rs", "").build(); - - fs::remove_file(paths::home().join(".cargo/credentials")).unwrap(); - - // Now perform the actual publish - p.cargo("publish --registry alternative") - .with_status(101) - .with_stderr_contains( - "error: no upload token found, \ - please run `cargo login` or pass `--token`", - ) - .run(); -} - -#[cargo_test] -fn publish_to_alt_registry() { - registry::alt_init(); - let p = project().file("src/main.rs", "fn main() {}").build(); - - // Setup the registry by publishing a package - Package::new("bar", "0.0.1").alternative(true).publish(); - - // Login so that we have the token available - p.cargo("login --registry alternative TOKEN").run(); - - // Now perform the actual publish - p.cargo("publish --registry alternative").run(); - - validate_alt_upload( - r#"{ - "authors": [], - "badges": {}, - "categories": [], - "deps": [], - "description": null, - "documentation": null, - "features": {}, - "homepage": null, - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": null, - "homepage": null, - "documentation": null, - "vers": "0.0.1" - }"#, - "foo-0.0.1.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - ); -} - -#[cargo_test] -fn publish_with_crates_io_dep() { - registry::alt_init(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = ["me"] - license = "MIT" - description = "foo" - - [dependencies.bar] - version = "0.0.1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").publish(); - - // Login so that we have the token available - p.cargo("login --registry alternative TOKEN").run(); - - p.cargo("publish --registry alternative").run(); - - validate_alt_upload( - r#"{ - "authors": ["me"], - "badges": {}, - "categories": [], - "deps": [ - { - "default_features": true, - "features": [], - "kind": "normal", - "name": "bar", - "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", - "target": null, - "version_req": "^0.0.1" - } - ], - "description": "foo", - "documentation": null, - "features": {}, - "homepage": null, - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": null, - "homepage": null, - "documentation": null, - "vers": "0.0.1" - }"#, - "foo-0.0.1.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - ); -} - -#[cargo_test] -fn passwords_in_registries_index_url_forbidden() { - registry::alt_init(); - - let config = paths::home().join(".cargo/config"); - - fs::write( - config, - r#" - [registries.alternative] - index = "ssh://git:secret@foobar.com" - "#, - ) - .unwrap(); - - let p = project().file("src/main.rs", "fn main() {}").build(); - - p.cargo("publish --registry alternative") - .with_status(101) - .with_stderr( - "\ -error: invalid index URL for registry `alternative` defined in [..]/home/.cargo/config - -Caused by: - registry URLs may not contain passwords -", - ) - 
.run(); -} - -#[cargo_test] -fn patch_alt_reg() { - registry::alt_init(); - Package::new("bar", "0.1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = { version = "0.1.0", registry = "alternative" } - - [patch.alternative] - bar = { path = "bar" } - "#, - ) - .file( - "src/lib.rs", - " - extern crate bar; - pub fn f() { bar::bar(); } - ", - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `alternative` index -[COMPILING] bar v0.1.0 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn bad_registry_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "0.0.1" - registry = "bad name" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[CWD]/Cargo.toml` - -Caused by: - invalid character ` ` in registry name: `bad name`, [..]", - ) - .run(); - - for cmd in &[ - "init", - "install foo", - "login", - "owner", - "publish", - "search", - "yank --vers 0.0.1", - ] { - p.cargo(cmd) - .arg("--registry") - .arg("bad name") - .with_status(101) - .with_stderr("[ERROR] invalid character ` ` in registry name: `bad name`, [..]") - .run(); - } -} - -#[cargo_test] -fn no_api() { - registry::alt_init(); - Package::new("bar", "0.0.1").alternative(true).publish(); - // Configure without `api`. - let repo = git2::Repository::open(registry::alt_registry_path()).unwrap(); - let cfg_path = registry::alt_registry_path().join("config.json"); - fs::write( - cfg_path, - format!(r#"{{"dl": "{}"}}"#, registry::alt_dl_url()), - ) - .unwrap(); - git::add(&repo); - git::commit(&repo); - - // First check that a dependency works. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies.bar] - version = "0.0.1" - registry = "alternative" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `alternative` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.0.1 (registry `alternative`) -[COMPILING] bar v0.0.1 (registry `alternative`) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); - - // Check all of the API commands. - let err = "[ERROR] registry `alternative` does not support API commands"; - - p.cargo("login --registry alternative TOKEN") - .with_status(101) - .with_stderr_contains(&err) - .run(); - - p.cargo("publish --registry alternative") - .with_status(101) - .with_stderr_contains(&err) - .run(); - - p.cargo("search --registry alternative") - .with_status(101) - .with_stderr_contains(&err) - .run(); - - p.cargo("owner --registry alternative --list") - .with_status(101) - .with_stderr_contains(&err) - .run(); - - p.cargo("yank --registry alternative --vers=0.0.1 bar") - .with_status(101) - .with_stderr_contains(&err) - .run(); - - p.cargo("yank --registry alternative --vers=0.0.1 bar") - .with_stderr_contains(&err) - .with_status(101) - .run(); -} - -#[cargo_test] -fn alt_reg_metadata() { - // Check for "registry" entries in `cargo metadata` with alternative registries. 
- registry::alt_init(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - altdep = { version = "0.0.1", registry = "alternative" } - iodep = { version = "0.0.1" } - "#, - ) - .file("src/lib.rs", "") - .build(); - - Package::new("bar", "0.0.1").publish(); - Package::new("altdep", "0.0.1") - .dep("bar", "0.0.1") - .alternative(true) - .publish(); - Package::new("altdep2", "0.0.1").alternative(true).publish(); - Package::new("iodep", "0.0.1") - .registry_dep("altdep2", "0.0.1") - .publish(); - - // The important thing to check here is the "registry" value in `deps`. - // They should be: - // foo -> altdep: alternative-registry - // foo -> iodep: null (because it is in crates.io) - // altdep -> bar: null (because it is in crates.io) - // iodep -> altdep2: alternative-registry - p.cargo("metadata --format-version=1 --no-deps") - .with_json( - r#" - { - "packages": [ - { - "name": "foo", - "version": "0.0.1", - "id": "foo 0.0.1 (path+file:[..]/foo)", - "license": null, - "license_file": null, - "description": null, - "source": null, - "dependencies": [ - { - "name": "altdep", - "source": "registry+file:[..]/alternative-registry", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": null, - "registry": "file:[..]/alternative-registry" - }, - { - "name": "iodep", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": null, - "registry": null - } - ], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]/foo/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - } - ], - "workspace_members": [ - "foo 0.0.1 (path+file:[..]/foo)" - ], - "resolve": null, - "target_directory": "[..]/foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); - - // --no-deps uses a different code path, make sure both work. 
- p.cargo("metadata --format-version=1") - .with_json( - r#" - { - "packages": [ - { - "name": "altdep", - "version": "0.0.1", - "id": "altdep 0.0.1 (registry+file:[..]/alternative-registry)", - "license": null, - "license_file": null, - "description": null, - "source": "registry+file:[..]/alternative-registry", - "dependencies": [ - { - "name": "bar", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": null, - "registry": null - } - ], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]/altdep-0.0.1/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - }, - { - "name": "altdep2", - "version": "0.0.1", - "id": "altdep2 0.0.1 (registry+file:[..]/alternative-registry)", - "license": null, - "license_file": null, - "description": null, - "source": "registry+file:[..]/alternative-registry", - "dependencies": [], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]/altdep2-0.0.1/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - }, - { - "name": "bar", - "version": "0.0.1", - "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "license": null, - "license_file": null, - "description": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "dependencies": [], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]/bar-0.0.1/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - }, - { - "name": "foo", - "version": "0.0.1", - "id": "foo 0.0.1 (path+file:[..]/foo)", - "license": null, - "license_file": null, - "description": null, - "source": null, - "dependencies": [ - { - "name": "altdep", - "source": "registry+file:[..]/alternative-registry", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": null, - "registry": "file:[..]/alternative-registry" - }, - { - "name": "iodep", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": null, - "registry": null - } - ], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]/foo/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - }, - { - "name": "iodep", - "version": "0.0.1", - "id": "iodep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "license": null, - "license_file": null, - "description": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - 
"dependencies": [ - { - "name": "altdep2", - "source": "registry+file:[..]/alternative-registry", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": null, - "registry": "file:[..]/alternative-registry" - } - ], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]/iodep-0.0.1/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - } - ], - "workspace_members": [ - "foo 0.0.1 (path+file:[..]/foo)" - ], - "resolve": "{...}", - "target_directory": "[..]/foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); -} - -#[cargo_test] -fn unknown_registry() { - // A known registry refers to an unknown registry. - // foo -> bar(crates.io) -> baz(alt) - registry::alt_init(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "0.0.1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("baz", "0.0.1").alternative(true).publish(); - Package::new("bar", "0.0.1") - .registry_dep("baz", "0.0.1") - .publish(); - - // Remove "alternative" from config. - let cfg_path = paths::home().join(".cargo/config"); - let mut config = fs::read_to_string(&cfg_path).unwrap(); - let start = config.find("[registries.alternative]").unwrap(); - config.insert(start, '#'); - let start_index = &config[start..].find("index =").unwrap(); - config.insert(start + start_index, '#'); - fs::write(&cfg_path, config).unwrap(); - - p.cargo("build").run(); - - // Important parts: - // foo -> bar registry = null - // bar -> baz registry = alternate - p.cargo("metadata --format-version=1") - .with_json( - r#" - { - "packages": [ - { - "name": "bar", - "version": "0.0.1", - "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "license": null, - "license_file": null, - "description": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "dependencies": [ - { - "name": "baz", - "source": "registry+file://[..]/alternative-registry", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": null, - "registry": "file:[..]/alternative-registry" - } - ], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - }, - { - "name": "baz", - "version": "0.0.1", - "id": "baz 0.0.1 (registry+file://[..]/alternative-registry)", - "license": null, - "license_file": null, - "description": null, - "source": "registry+file://[..]/alternative-registry", - "dependencies": [], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - }, - { - "name": "foo", - "version": "0.0.1", - "id": "foo 0.0.1 (path+file://[..]/foo)", - 
"license": null, - "license_file": null, - "description": null, - "source": null, - "dependencies": [ - { - "name": "bar", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": null, - "registry": null - } - ], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]/foo/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - } - ], - "workspace_members": [ - "foo 0.0.1 (path+file://[..]/foo)" - ], - "resolve": "{...}", - "target_directory": "[..]/foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - } - "#, - ) - .run(); -} - -#[cargo_test] -fn registries_index_relative_url() { - registry::alt_init(); - let config = paths::root().join(".cargo/config"); - fs::create_dir_all(config.parent().unwrap()).unwrap(); - fs::write( - &config, - r#" - [registries.relative] - index = "file:alternative-registry" - "#, - ) - .unwrap(); - - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "0.0.1" - registry = "relative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").alternative(true).publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `relative` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.0.1 (registry `relative`) -[COMPILING] bar v0.0.1 (registry `relative`) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn registries_index_relative_path_not_allowed() { - registry::alt_init(); - let config = paths::root().join(".cargo/config"); - fs::create_dir_all(config.parent().unwrap()).unwrap(); - fs::write( - &config, - r#" - [registries.relative] - index = "alternative-registry" - "#, - ) - .unwrap(); - - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "0.0.1" - registry = "relative" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").alternative(true).publish(); - - p.cargo("build") - .with_stderr(&format!( - "\ -error: failed to parse manifest at `{root}/foo/Cargo.toml` - -Caused by: - invalid index URL for registry `relative` defined in [..]/.cargo/config - -Caused by: - invalid url `alternative-registry`: relative URL without a base -", - root = paths::root().to_str().unwrap() - )) - .with_status(101) - .run(); -} - -#[cargo_test] -fn both_index_and_registry() { - let p = project().file("src/lib.rs", "").build(); - for cmd in &["publish", "owner", "search", "yank --vers 1.0.0"] { - p.cargo(cmd) - .arg("--registry=foo") - .arg("--index=foo") - .with_status(101) - .with_stderr( - "[ERROR] both `--index` and `--registry` \ - should not be set at the same time", - ) - .run(); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/bad_config.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/bad_config.rs deleted file mode 100644 index ef06da3e3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/bad_config.rs +++ /dev/null @@ 
-1,1462 +0,0 @@ -//! Tests for some invalid .cargo/config files. - -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_manifest, project, rustc_host}; - -#[cargo_test] -fn bad1() { - let p = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [target] - nonexistent-target = "foo" - "#, - ) - .build(); - p.cargo("build -v --target=nonexistent-target") - .with_status(101) - .with_stderr( - "\ -[ERROR] invalid configuration for key `target.nonexistent-target` -expected a table, but found a string for `[..]` in [..]config -", - ) - .run(); -} - -#[cargo_test] -fn bad2() { - let p = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [http] - proxy = 3.0 - "#, - ) - .build(); - p.cargo("publish -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] could not load Cargo configuration - -Caused by: - failed to load TOML configuration from `[..]config` - -Caused by: - failed to parse key `http` - -Caused by: - failed to parse key `proxy` - -Caused by: - found TOML configuration value of unknown type `float` -", - ) - .run(); -} - -#[cargo_test] -fn bad3() { - let p = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [http] - proxy = true - "#, - ) - .build(); - Package::new("foo", "1.0.0").publish(); - - p.cargo("publish -v") - .with_status(101) - .with_stderr( - "\ -error: failed to update registry [..] - -Caused by: - error in [..]config: `http.proxy` expected a string, but found a boolean -", - ) - .run(); -} - -#[cargo_test] -fn bad4() { - let p = project() - .file( - ".cargo/config", - r#" - [cargo-new] - vcs = false - "#, - ) - .build(); - p.cargo("new -v foo") - .with_status(101) - .with_stderr( - "\ -[ERROR] Failed to create package `foo` at `[..]` - -Caused by: - error in [..]config: `cargo-new.vcs` expected a string, but found a boolean -", - ) - .run(); -} - -#[cargo_test] -fn bad6() { - let p = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [http] - user-agent = true - "#, - ) - .build(); - Package::new("foo", "1.0.0").publish(); - - p.cargo("publish -v") - .with_status(101) - .with_stderr( - "\ -error: failed to update registry [..] 
- -Caused by: - error in [..]config: `http.user-agent` expected a string, but found a boolean -", - ) - .run(); -} - -#[cargo_test] -fn bad_cargo_config_jobs() { - let p = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - jobs = -1 - "#, - ) - .build(); - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] error in [..].cargo/config: \ -could not load config key `build.jobs` - -Caused by: - invalid value: integer `-1`, expected u32 -", - ) - .run(); -} - -#[cargo_test] -fn invalid_global_config() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - foo = "0.1.0" - "#, - ) - .file(".cargo/config", "4") - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] could not load Cargo configuration - -Caused by: - could not parse TOML configuration in `[..]` - -Caused by: - could not parse input as TOML - -Caused by: - expected an equals, found eof at line 1 column 2 -", - ) - .run(); -} - -#[cargo_test] -fn bad_cargo_lock() { - let p = project() - .file("Cargo.lock", "[[package]]\nfoo = 92") - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse lock file at: [..]Cargo.lock - -Caused by: - missing field `name` for key `package` -", - ) - .run(); -} - -#[cargo_test] -fn duplicate_packages_in_cargo_lock() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file( - "Cargo.lock", - r#" - [[package]] - name = "foo" - version = "0.0.1" - dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - ] - - [[package]] - name = "bar" - version = "0.1.0" - source = "registry+https://github.com/rust-lang/crates.io-index" - - [[package]] - name = "bar" - version = "0.1.0" - source = "registry+https://github.com/rust-lang/crates.io-index" - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse lock file at: [..] - -Caused by: - package `bar` is specified twice in the lockfile -", - ) - .run(); -} - -#[cargo_test] -fn bad_source_in_cargo_lock() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file( - "Cargo.lock", - r#" - [[package]] - name = "foo" - version = "0.0.1" - dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - ] - - [[package]] - name = "bar" - version = "0.1.0" - source = "You shall not parse" - "#, - ) - .build(); - - p.cargo("build --verbose") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse lock file at: [..] 
- -Caused by: - invalid source `You shall not parse` for key `package.source` -", - ) - .run(); -} - -#[cargo_test] -fn bad_dependency_in_lockfile() { - let p = project() - .file("src/lib.rs", "") - .file( - "Cargo.lock", - r#" - [[package]] - name = "foo" - version = "0.0.1" - dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - ] - "#, - ) - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn bad_git_dependency() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - foo = { git = "file:.." } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[UPDATING] git repository `file:///` -[ERROR] failed to get `foo` as a dependency of package `foo v0.0.0 [..]` - -Caused by: - failed to load source for dependency `foo` - -Caused by: - Unable to update file:/// - -Caused by: - failed to clone into: [..] - -Caused by: - [..]'file:///' is not a valid local file URI[..] -", - ) - .run(); -} - -#[cargo_test] -fn bad_crate_type() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [lib] - crate-type = ["bad_type", "rlib"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr_contains( - "error: failed to run `rustc` to learn about crate-type bad_type information", - ) - .run(); -} - -#[cargo_test] -fn malformed_override() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [target.x86_64-apple-darwin.freetype] - native = { - foo: "bar" - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - could not parse input as TOML - -Caused by: - expected a table key, found a newline at line 8 column 27 -", - ) - .run(); -} - -#[cargo_test] -fn duplicate_binary_names() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "qqq" - version = "0.1.0" - authors = ["A "] - - [[bin]] - name = "e" - path = "a.rs" - - [[bin]] - name = "e" - path = "b.rs" - "#, - ) - .file("a.rs", r#"fn main() -> () {}"#) - .file("b.rs", r#"fn main() -> () {}"#) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - found duplicate binary name e, but all binary targets must have a unique name -", - ) - .run(); -} - -#[cargo_test] -fn duplicate_example_names() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "qqq" - version = "0.1.0" - authors = ["A "] - - [[example]] - name = "ex" - path = "examples/ex.rs" - - [[example]] - name = "ex" - path = "examples/ex2.rs" - "#, - ) - .file("examples/ex.rs", r#"fn main () -> () {}"#) - .file("examples/ex2.rs", r#"fn main () -> () {}"#) - .build(); - - p.cargo("build --example ex") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - found duplicate example name ex, but all example targets must have a unique name -", - ) - .run(); -} - -#[cargo_test] -fn duplicate_bench_names() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "qqq" - version = "0.1.0" - authors = ["A "] - - [[bench]] - name = "ex" - path = "benches/ex.rs" - - [[bench]] - name = "ex" - path = "benches/ex2.rs" - "#, - ) - .file("benches/ex.rs", r#"fn main 
() {}"#) - .file("benches/ex2.rs", r#"fn main () {}"#) - .build(); - - p.cargo("bench") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - found duplicate bench name ex, but all bench targets must have a unique name -", - ) - .run(); -} - -#[cargo_test] -fn duplicate_deps() { - let p = project() - .file("shim-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("shim-bar/src/lib.rs", "pub fn a() {}") - .file("linux-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("linux-bar/src/lib.rs", "pub fn a() {}") - .file( - "Cargo.toml", - r#" - [package] - name = "qqq" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = "shim-bar" } - - [target.x86_64-unknown-linux-gnu.dependencies] - bar = { path = "linux-bar" } - "#, - ) - .file("src/main.rs", r#"fn main () {}"#) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - Dependency 'bar' has different source paths depending on the build target. Each dependency must \ -have a single canonical source path irrespective of build target. -", - ) - .run(); -} - -#[cargo_test] -fn duplicate_deps_diff_sources() { - let p = project() - .file("shim-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("shim-bar/src/lib.rs", "pub fn a() {}") - .file("linux-bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("linux-bar/src/lib.rs", "pub fn a() {}") - .file( - "Cargo.toml", - r#" - [package] - name = "qqq" - version = "0.0.1" - authors = [] - - [target.i686-unknown-linux-gnu.dependencies] - bar = { path = "shim-bar" } - - [target.x86_64-unknown-linux-gnu.dependencies] - bar = { path = "linux-bar" } - "#, - ) - .file("src/main.rs", r#"fn main () {}"#) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - Dependency 'bar' has different source paths depending on the build target. Each dependency must \ -have a single canonical source path irrespective of build target. -", - ) - .run(); -} - -#[cargo_test] -fn unused_keys() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [target.foo] - bar = "3" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -warning: unused manifest key: target.foo.bar -[COMPILING] foo v0.1.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - bulid = "foo" - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .build(); - p.cargo("build") - .with_stderr( - "\ -warning: unused manifest key: project.bulid -[COMPILING] foo [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - let p = project() - .at("bar") - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - build = "foo" - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .build(); - p.cargo("build") - .with_stderr( - "\ -warning: unused manifest key: lib.build -[COMPILING] foo [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn unused_keys_in_virtual_manifest() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - bulid = "foo" - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - p.cargo("build --workspace") - .with_stderr( - "\ -[WARNING] [..]/foo/Cargo.toml: unused manifest key: workspace.bulid -[COMPILING] bar [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn empty_dependencies() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - bar = {} - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").publish(); - - p.cargo("build") - .with_stderr_contains( - "\ -warning: dependency (bar) specified without providing a local path, Git repository, or version \ -to use. This will be considered an error in future versions -", - ) - .run(); -} - -#[cargo_test] -fn invalid_toml_historically_allowed_fails() { - let p = project() - .file(".cargo/config", "[bar] baz = 2") - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains(" expected newline, found an identifier at line 1 column 7") - .run(); -} - -#[cargo_test] -fn ambiguous_git_reference() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.bar] - git = "http://127.0.0.1" - branch = "master" - tag = "some-tag" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - dependency (bar) specification is ambiguous. Only one of `branch`, `tag` or `rev` is allowed. -", - ) - .run(); -} - -#[cargo_test] -fn fragment_in_git_url() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.bar] - git = "http://127.0.0.1#foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr_contains( - "\ -[WARNING] URL fragment `#foo` in git URL is ignored for dependency (bar). \ -If you were trying to specify a specific git revision, \ -use `rev = \"foo\"` in the dependency declaration. 
-", - ) - .run(); -} - -#[cargo_test] -fn bad_source_config1() { - let p = project() - .file("src/lib.rs", "") - .file(".cargo/config", "[source.foo]") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr("error: no source location specified for `source.foo`, need [..]") - .run(); -} - -#[cargo_test] -fn bad_source_config2() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [source.crates-io] - registry = 'http://example.com' - replace-with = 'bar' - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to get `bar` as a dependency of package `foo v0.0.0 [..]` - -Caused by: - failed to load source for dependency `bar` - -Caused by: - Unable to update registry `crates-io` - -Caused by: - could not find a configured source with the name `bar` \ - when attempting to lookup `crates-io` (configuration in [..]) -", - ) - .run(); -} - -#[cargo_test] -fn bad_source_config3() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [source.crates-io] - registry = 'https://example.com' - replace-with = 'crates-io' - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to get `bar` as a dependency of package `foo v0.0.0 [..]` - -Caused by: - failed to load source for dependency `bar` - -Caused by: - Unable to update registry `crates-io` - -Caused by: - detected a cycle of `replace-with` sources, [..] -", - ) - .run(); -} - -#[cargo_test] -fn bad_source_config4() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [source.crates-io] - replace-with = 'bar' - - [source.bar] - registry = 'https://example.com' - replace-with = 'crates-io' - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to get `bar` as a dependency of package `foo v0.0.0 ([..])` - -Caused by: - failed to load source for dependency `bar` - -Caused by: - Unable to update registry `crates-io` - -Caused by: - detected a cycle of `replace-with` sources, the source `crates-io` is \ - eventually replaced with itself (configuration in [..]) -", - ) - .run(); -} - -#[cargo_test] -fn bad_source_config5() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [source.crates-io] - registry = 'https://example.com' - replace-with = 'bar' - - [source.bar] - registry = 'not a url' - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: configuration key `source.bar.registry` specified an invalid URL (in [..]) - -Caused by: - invalid url `not a url`: [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn both_git_and_path_specified() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.bar] - git = "http://127.0.0.1" - path = "bar" - "#, - ) - .file("src/lib.rs", "") - .build(); - - foo.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - dependency (bar) specification is ambiguous. Only one of `git` or `path` is allowed. -", - ) - .run(); -} - -#[cargo_test] -fn bad_source_config6() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [source.crates-io] - registry = 'https://example.com' - replace-with = ['not', 'a', 'string'] - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] error in [..]/foo/.cargo/config: could not load config key `source.crates-io.replace-with` - -Caused by: - error in [..]/foo/.cargo/config: `source.crates-io.replace-with` expected a string, but found a array -" - ) - .run(); -} - -#[cargo_test] -fn ignored_git_revision() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.bar] - path = "bar" - branch = "spam" - "#, - ) - .file("src/lib.rs", "") - .build(); - - foo.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - key `branch` is ignored for dependency (bar). -", - ) - .run(); -} - -#[cargo_test] -fn bad_source_config7() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [source.foo] - registry = 'https://example.com' - local-registry = 'file:///another/file' - "#, - ) - .build(); - - Package::new("bar", "0.1.0").publish(); - - p.cargo("build") - .with_status(101) - .with_stderr("error: more than one source location specified for `source.foo`") - .run(); -} - -#[cargo_test] -fn bad_source_config8() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [source.foo] - branch = "somebranch" - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "[ERROR] source definition `source.foo` specifies `branch`, \ - but that requires a `git` key to be specified (in [..]/foo/.cargo/config)", - ) - .run(); -} - -#[cargo_test] -fn bad_dependency() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - bar = 3 - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - invalid type: integer `3`, expected a version string like [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn bad_debuginfo() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [profile.dev] - debug = 'a' - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - expected a boolean or an integer for [..] -", - ) - .run(); -} - -#[cargo_test] -fn bad_opt_level() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = 3 - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - expected a boolean or a string for key [..] -", - ) - .run(); -} - -#[cargo_test] -fn warn_semver_metadata() { - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [dependencies] - bar = "1.0.0+1234" - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("check") - .with_stderr_contains("[WARNING] version requirement `1.0.0+1234` for dependency `bar`[..]") - .run(); -} - -#[cargo_test] -fn bad_target_cfg() { - // Invalid type in a StringList. - // - // The error message is a bit unfortunate here. The type here ends up - // being essentially Value>, and each layer of "Value" - // adds some context to the error message. Also, untagged enums provide - // strange error messages. Hopefully most users will be able to untangle - // the message. - let p = project() - .file( - ".cargo/config", - r#" - [target.'cfg(not(target_os = "none"))'] - runner = false - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -[ERROR] error in [..]/foo/.cargo/config: \ -could not load config key `target.\"cfg(not(target_os = \\\"none\\\"))\".runner` - -Caused by: - error in [..]/foo/.cargo/config: \ - could not load config key `target.\"cfg(not(target_os = \\\"none\\\"))\".runner` - -Caused by: - invalid configuration for key `target.\"cfg(not(target_os = \\\"none\\\"))\".runner` - expected a string or array of strings, but found a boolean for \ - `target.\"cfg(not(target_os = \\\"none\\\"))\".runner` in [..]/foo/.cargo/config -", - ) - .run(); -} - -#[cargo_test] -fn bad_target_links_overrides() { - // Invalid parsing of links overrides. - // - // This error message is terrible. Nothing in the deserialization path is - // using config::Value<>, so nothing is able to report the location. I - // think this illustrates how the way things break down with how it - // currently is designed with serde. - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [target.{}.somelib] - rustc-flags = 'foo' - "#, - rustc_host() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr( - "[ERROR] Only `-l` and `-L` flags are allowed in target config \ - `target.[..].rustc-flags` (in [..]foo/.cargo/config): `foo`", - ) - .run(); - - p.change_file( - ".cargo/config", - &format!( - "[target.{}.somelib] - warning = \"foo\" - ", - rustc_host(), - ), - ); - p.cargo("check") - .with_status(101) - .with_stderr("[ERROR] `warning` is not supported in build script overrides") - .run(); -} - -#[cargo_test] -fn redefined_sources() { - // Cannot define a source multiple times. 
- let p = project() - .file( - ".cargo/config", - r#" - [source.foo] - registry = "https://github.com/rust-lang/crates.io-index" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -[ERROR] source `foo` defines source registry `crates-io`, \ - but that source is already defined by `crates-io` -note: Sources are not allowed to be defined multiple times. -", - ) - .run(); - - p.change_file( - ".cargo/config", - r#" - [source.one] - directory = "index" - - [source.two] - directory = "index" - "#, - ); - - // Name is `[..]` because we can't guarantee the order. - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -[ERROR] source `[..]` defines source dir [..]/foo/index, \ - but that source is already defined by `[..]` -note: Sources are not allowed to be defined multiple times. -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/bad_manifest_path.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/bad_manifest_path.rs deleted file mode 100644 index 12b89c6f4..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/bad_manifest_path.rs +++ /dev/null @@ -1,386 +0,0 @@ -//! Tests for invalid --manifest-path arguments. - -use cargo_test_support::{basic_bin_manifest, main_file, project}; - -#[track_caller] -fn assert_not_a_cargo_toml(command: &str, manifest_path_argument: &str) { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo(command) - .arg("--manifest-path") - .arg(manifest_path_argument) - .cwd(p.root().parent().unwrap()) - .with_status(101) - .with_stderr( - "[ERROR] the manifest-path must be a path \ - to a Cargo.toml file", - ) - .run(); -} - -#[track_caller] -fn assert_cargo_toml_doesnt_exist(command: &str, manifest_path_argument: &str) { - let p = project().build(); - let expected_path = manifest_path_argument - .split('/') - .collect::>() - .join("[..]"); - - p.cargo(command) - .arg("--manifest-path") - .arg(manifest_path_argument) - .cwd(p.root().parent().unwrap()) - .with_status(101) - .with_stderr(format!( - "[ERROR] manifest path `{}` does not exist", - expected_path - )) - .run(); -} - -#[cargo_test] -fn bench_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("bench", "foo"); -} - -#[cargo_test] -fn bench_dir_plus_file() { - assert_not_a_cargo_toml("bench", "foo/bar"); -} - -#[cargo_test] -fn bench_dir_plus_path() { - assert_not_a_cargo_toml("bench", "foo/bar/baz"); -} - -#[cargo_test] -fn bench_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("bench", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn build_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("build", "foo"); -} - -#[cargo_test] -fn build_dir_plus_file() { - assert_not_a_cargo_toml("bench", "foo/bar"); -} - -#[cargo_test] -fn build_dir_plus_path() { - assert_not_a_cargo_toml("bench", "foo/bar/baz"); -} - -#[cargo_test] -fn build_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("build", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn clean_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("clean", "foo"); -} - -#[cargo_test] -fn clean_dir_plus_file() { - assert_not_a_cargo_toml("clean", "foo/bar"); -} - -#[cargo_test] -fn clean_dir_plus_path() { - assert_not_a_cargo_toml("clean", "foo/bar/baz"); -} - -#[cargo_test] -fn clean_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("clean", "foo/bar/baz/Cargo.toml"); -} - 
-#[cargo_test] -fn doc_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("doc", "foo"); -} - -#[cargo_test] -fn doc_dir_plus_file() { - assert_not_a_cargo_toml("doc", "foo/bar"); -} - -#[cargo_test] -fn doc_dir_plus_path() { - assert_not_a_cargo_toml("doc", "foo/bar/baz"); -} - -#[cargo_test] -fn doc_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("doc", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn fetch_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("fetch", "foo"); -} - -#[cargo_test] -fn fetch_dir_plus_file() { - assert_not_a_cargo_toml("fetch", "foo/bar"); -} - -#[cargo_test] -fn fetch_dir_plus_path() { - assert_not_a_cargo_toml("fetch", "foo/bar/baz"); -} - -#[cargo_test] -fn fetch_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("fetch", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn generate_lockfile_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("generate-lockfile", "foo"); -} - -#[cargo_test] -fn generate_lockfile_dir_plus_file() { - assert_not_a_cargo_toml("generate-lockfile", "foo/bar"); -} - -#[cargo_test] -fn generate_lockfile_dir_plus_path() { - assert_not_a_cargo_toml("generate-lockfile", "foo/bar/baz"); -} - -#[cargo_test] -fn generate_lockfile_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("generate-lockfile", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn package_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("package", "foo"); -} - -#[cargo_test] -fn package_dir_plus_file() { - assert_not_a_cargo_toml("package", "foo/bar"); -} - -#[cargo_test] -fn package_dir_plus_path() { - assert_not_a_cargo_toml("package", "foo/bar/baz"); -} - -#[cargo_test] -fn package_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("package", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn pkgid_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("pkgid", "foo"); -} - -#[cargo_test] -fn pkgid_dir_plus_file() { - assert_not_a_cargo_toml("pkgid", "foo/bar"); -} - -#[cargo_test] -fn pkgid_dir_plus_path() { - assert_not_a_cargo_toml("pkgid", "foo/bar/baz"); -} - -#[cargo_test] -fn pkgid_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("pkgid", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn publish_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("publish", "foo"); -} - -#[cargo_test] -fn publish_dir_plus_file() { - assert_not_a_cargo_toml("publish", "foo/bar"); -} - -#[cargo_test] -fn publish_dir_plus_path() { - assert_not_a_cargo_toml("publish", "foo/bar/baz"); -} - -#[cargo_test] -fn publish_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("publish", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn read_manifest_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("read-manifest", "foo"); -} - -#[cargo_test] -fn read_manifest_dir_plus_file() { - assert_not_a_cargo_toml("read-manifest", "foo/bar"); -} - -#[cargo_test] -fn read_manifest_dir_plus_path() { - assert_not_a_cargo_toml("read-manifest", "foo/bar/baz"); -} - -#[cargo_test] -fn read_manifest_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("read-manifest", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn run_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("run", "foo"); -} - -#[cargo_test] -fn run_dir_plus_file() { - assert_not_a_cargo_toml("run", "foo/bar"); -} - -#[cargo_test] -fn run_dir_plus_path() { - assert_not_a_cargo_toml("run", "foo/bar/baz"); -} - -#[cargo_test] -fn run_dir_to_nonexistent_cargo_toml() { - 
assert_cargo_toml_doesnt_exist("run", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn rustc_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("rustc", "foo"); -} - -#[cargo_test] -fn rustc_dir_plus_file() { - assert_not_a_cargo_toml("rustc", "foo/bar"); -} - -#[cargo_test] -fn rustc_dir_plus_path() { - assert_not_a_cargo_toml("rustc", "foo/bar/baz"); -} - -#[cargo_test] -fn rustc_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("rustc", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn test_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("test", "foo"); -} - -#[cargo_test] -fn test_dir_plus_file() { - assert_not_a_cargo_toml("test", "foo/bar"); -} - -#[cargo_test] -fn test_dir_plus_path() { - assert_not_a_cargo_toml("test", "foo/bar/baz"); -} - -#[cargo_test] -fn test_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("test", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn update_dir_containing_cargo_toml() { - assert_not_a_cargo_toml("update", "foo"); -} - -#[cargo_test] -fn update_dir_plus_file() { - assert_not_a_cargo_toml("update", "foo/bar"); -} - -#[cargo_test] -fn update_dir_plus_path() { - assert_not_a_cargo_toml("update", "foo/bar/baz"); -} - -#[cargo_test] -fn update_dir_to_nonexistent_cargo_toml() { - assert_cargo_toml_doesnt_exist("update", "foo/bar/baz/Cargo.toml"); -} - -#[cargo_test] -fn verify_project_dir_containing_cargo_toml() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("verify-project --manifest-path foo") - .cwd(p.root().parent().unwrap()) - .with_status(1) - .with_stdout( - "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ - ", - ) - .run(); -} - -#[cargo_test] -fn verify_project_dir_plus_file() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("verify-project --manifest-path foo/bar") - .cwd(p.root().parent().unwrap()) - .with_status(1) - .with_stdout( - "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ - ", - ) - .run(); -} - -#[cargo_test] -fn verify_project_dir_plus_path() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("verify-project --manifest-path foo/bar/baz") - .cwd(p.root().parent().unwrap()) - .with_status(1) - .with_stdout( - "{\"invalid\":\"the manifest-path must be a path to a Cargo.toml file\"}\ - ", - ) - .run(); -} - -#[cargo_test] -fn verify_project_dir_to_nonexistent_cargo_toml() { - let p = project().build(); - p.cargo("verify-project --manifest-path foo/bar/baz/Cargo.toml") - .cwd(p.root().parent().unwrap()) - .with_status(1) - .with_stdout( - "{\"invalid\":\"manifest path `foo[..]bar[..]baz[..]Cargo.toml` does not exist\"}\ - ", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/bench.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/bench.rs deleted file mode 100644 index a3923dced..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/bench.rs +++ /dev/null @@ -1,1758 +0,0 @@ -//! Tests for the `cargo bench` command. 
- -use cargo_test_support::is_nightly; -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project}; - -#[cargo_test] -fn cargo_bench_simple() { - if !is_nightly() { - return; - } - - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/main.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - - fn hello() -> &'static str { - "hello" - } - - pub fn main() { - println!("{}", hello()) - } - - #[bench] - fn bench_hello(_b: &mut test::Bencher) { - assert_eq!(hello(), "hello") - } - "#, - ) - .build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("hello\n").run(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test bench_hello ... bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_bench_implicit() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "src/main.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - #[bench] fn run1(_ben: &mut test::Bencher) { } - fn main() { println!("Hello main!"); } - "#, - ) - .file( - "tests/other.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run3(_ben: &mut test::Bencher) { } - "#, - ) - .file( - "benches/mybench.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run2(_ben: &mut test::Bencher) { } - "#, - ) - .build(); - - p.cargo("bench --benches") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE]) -[RUNNING] [..] (target/release/deps/mybench-[..][EXE]) -", - ) - .with_stdout_contains("test run2 ... bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_bin_implicit() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "src/main.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - #[bench] fn run1(_ben: &mut test::Bencher) { } - fn main() { println!("Hello main!"); } - "#, - ) - .file( - "tests/other.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run3(_ben: &mut test::Bencher) { } - "#, - ) - .file( - "benches/mybench.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run2(_ben: &mut test::Bencher) { } - "#, - ) - .build(); - - p.cargo("bench --bins") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE]) -", - ) - .with_stdout_contains("test run1 ... bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_tarname() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "benches/bin1.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run1(_ben: &mut test::Bencher) { } - "#, - ) - .file( - "benches/bin2.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run2(_ben: &mut test::Bencher) { } - "#, - ) - .build(); - - p.cargo("bench --bench bin2") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/bin2-[..][EXE]) -", - ) - .with_stdout_contains("test run2 ... 
bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_multiple_targets() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "benches/bin1.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run1(_ben: &mut test::Bencher) { } - "#, - ) - .file( - "benches/bin2.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run2(_ben: &mut test::Bencher) { } - "#, - ) - .file( - "benches/bin3.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run3(_ben: &mut test::Bencher) { } - "#, - ) - .build(); - - p.cargo("bench --bench bin1 --bench bin2") - .with_stdout_contains("test run1 ... bench: [..]") - .with_stdout_contains("test run2 ... bench: [..]") - .with_stdout_does_not_contain("[..]run3[..]") - .run(); -} - -#[cargo_test] -fn cargo_bench_verbose() { - if !is_nightly() { - return; - } - - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/main.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - fn main() {} - #[bench] fn bench_hello(_b: &mut test::Bencher) {} - "#, - ) - .build(); - - p.cargo("bench -v hello") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc [..] src/main.rs [..]` -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] `[..]target/release/deps/foo-[..][EXE] hello --bench`", - ) - .with_stdout_contains("test bench_hello ... bench: [..]") - .run(); -} - -#[cargo_test] -fn many_similar_names() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "src/lib.rs", - " - #![feature(test)] - #[cfg(test)] - extern crate test; - pub fn foo() {} - #[bench] fn lib_bench(_b: &mut test::Bencher) {} - ", - ) - .file( - "src/main.rs", - " - #![feature(test)] - #[cfg(test)] - extern crate foo; - #[cfg(test)] - extern crate test; - fn main() {} - #[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() } - ", - ) - .file( - "benches/foo.rs", - r#" - #![feature(test)] - extern crate foo; - extern crate test; - #[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() } - "#, - ) - .build(); - - p.cargo("bench") - .with_stdout_contains("test bin_bench ... bench: 0 ns/iter (+/- 0)") - .with_stdout_contains("test lib_bench ... bench: 0 ns/iter (+/- 0)") - .with_stdout_contains("test bench_bench ... bench: 0 ns/iter (+/- 0)") - .run(); -} - -#[cargo_test] -fn cargo_bench_failing_test() { - if !is_nightly() { - return; - } - - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/main.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - fn hello() -> &'static str { - "hello" - } - - pub fn main() { - println!("{}", hello()) - } - - #[bench] - fn bench_hello(_b: &mut test::Bencher) { - assert_eq!(hello(), "nope") - } - "#, - ) - .build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("hello\n").run(); - - // Force libtest into serial execution so that the test header will be printed. - p.cargo("bench -- --test-threads=1") - .with_stdout_contains("test bench_hello ...[..]") - .with_stderr_contains( - "\ -[COMPILING] foo v0.5.0 ([CWD])[..] -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] 
(target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains( - "[..]thread '[..]' panicked at 'assertion failed: `(left == right)`[..]", - ) - .with_stdout_contains("[..]left: `\"hello\"`[..]") - .with_stdout_contains("[..]right: `\"nope\"`[..]") - .with_stdout_contains("[..]src/main.rs:15[..]") - .with_status(101) - .run(); -} - -#[cargo_test] -fn bench_with_lib_dep() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name = "baz" - path = "src/main.rs" - "#, - ) - .file( - "src/lib.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - /// - /// ```rust - /// extern crate foo; - /// fn main() { - /// println!("{}", foo::foo()); - /// } - /// ``` - /// - pub fn foo(){} - #[bench] fn lib_bench(_b: &mut test::Bencher) {} - "#, - ) - .file( - "src/main.rs", - " - #![feature(test)] - #[allow(unused_extern_crates)] - extern crate foo; - #[cfg(test)] - extern crate test; - - fn main() {} - - #[bench] - fn bin_bench(_b: &mut test::Bencher) {} - ", - ) - .build(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE]) -[RUNNING] [..] (target/release/deps/baz-[..][EXE])", - ) - .with_stdout_contains("test lib_bench ... bench: [..]") - .with_stdout_contains("test bin_bench ... bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_with_deep_lib_dep() { - if !is_nightly() { - return; - } - - let p = project() - .at("bar") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies.foo] - path = "../foo" - "#, - ) - .file( - "src/lib.rs", - " - #![feature(test)] - #[cfg(test)] - extern crate foo; - #[cfg(test)] - extern crate test; - #[bench] - fn bar_bench(_b: &mut test::Bencher) { - foo::foo(); - } - ", - ) - .build(); - let _p2 = project() - .file( - "src/lib.rs", - " - #![feature(test)] - #[cfg(test)] - extern crate test; - - pub fn foo() {} - - #[bench] - fn foo_bench(_b: &mut test::Bencher) {} - ", - ) - .build(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[COMPILING] bar v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/bar-[..][EXE])", - ) - .with_stdout_contains("test bar_bench ... bench: [..]") - .run(); -} - -#[cargo_test] -fn external_bench_explicit() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [[bench]] - name = "bench" - path = "src/bench.rs" - "#, - ) - .file( - "src/lib.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - pub fn get_hello() -> &'static str { "Hello" } - - #[bench] - fn internal_bench(_b: &mut test::Bencher) {} - "#, - ) - .file( - "src/bench.rs", - r#" - #![feature(test)] - #[allow(unused_extern_crates)] - extern crate foo; - extern crate test; - - #[bench] - fn external_bench(_b: &mut test::Bencher) {} - "#, - ) - .build(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE]) -[RUNNING] [..] (target/release/deps/bench-[..][EXE])", - ) - .with_stdout_contains("test internal_bench ... bench: [..]") - .with_stdout_contains("test external_bench ... 
bench: [..]") - .run(); -} - -#[cargo_test] -fn external_bench_implicit() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "src/lib.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - - pub fn get_hello() -> &'static str { "Hello" } - - #[bench] - fn internal_bench(_b: &mut test::Bencher) {} - "#, - ) - .file( - "benches/external.rs", - r#" - #![feature(test)] - #[allow(unused_extern_crates)] - extern crate foo; - extern crate test; - - #[bench] - fn external_bench(_b: &mut test::Bencher) {} - "#, - ) - .build(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE]) -[RUNNING] [..] (target/release/deps/external-[..][EXE])", - ) - .with_stdout_contains("test internal_bench ... bench: [..]") - .with_stdout_contains("test external_bench ... bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_autodiscover_2015() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - edition = "2015" - - [features] - magic = [] - - [[bench]] - name = "bench_magic" - required-features = ["magic"] - "#, - ) - .file("src/lib.rs", "") - .file( - "benches/bench_basic.rs", - r#" - #![feature(test)] - #[allow(unused_extern_crates)] - extern crate foo; - extern crate test; - - #[bench] - fn bench_basic(_b: &mut test::Bencher) {} - "#, - ) - .file( - "benches/bench_magic.rs", - r#" - #![feature(test)] - #[allow(unused_extern_crates)] - extern crate foo; - extern crate test; - - #[bench] - fn bench_magic(_b: &mut test::Bencher) {} - "#, - ) - .build(); - - p.cargo("bench bench_basic") - .with_stderr( - "warning: \ -An explicit [[bench]] section is specified in Cargo.toml which currently -disables Cargo from automatically inferring other benchmark targets. -This inference behavior will change in the Rust 2018 edition and the following -files will be included as a benchmark target: - -* [..]bench_basic.rs - -This is likely to break cargo build or cargo test as these files may not be -ready to be compiled as a benchmark target today. You can future-proof yourself -and disable this warning by adding `autobenches = false` to your [package] -section. You may also move the files to a location where Cargo would not -automatically infer them to be a target, such as in subfolders. - -For more information on this warning you can consult -https://github.com/rust-lang/cargo/issues/5330 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE]) -", - ) - .run(); -} - -#[cargo_test] -fn dont_run_examples() { - if !is_nightly() { - return; - } - - let p = project() - .file("src/lib.rs", "") - .file( - "examples/dont-run-me-i-will-fail.rs", - r#"fn main() { panic!("Examples should not be run by 'cargo test'"); }"#, - ) - .build(); - p.cargo("bench").run(); -} - -#[cargo_test] -fn pass_through_command_line() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "src/lib.rs", - " - #![feature(test)] - #[cfg(test)] - extern crate test; - - #[bench] fn foo(_b: &mut test::Bencher) {} - #[bench] fn bar(_b: &mut test::Bencher) {} - ", - ) - .build(); - - p.cargo("bench bar") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test bar ... 
bench: [..]") - .run(); - - p.cargo("bench foo") - .with_stderr( - "[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test foo ... bench: [..]") - .run(); -} - -// Regression test for running cargo-bench twice with -// tests in an rlib -#[cargo_test] -fn cargo_bench_twice() { - if !is_nightly() { - return; - } - - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file( - "src/foo.rs", - r#" - #![crate_type = "rlib"] - #![feature(test)] - #[cfg(test)] - extern crate test; - - #[bench] - fn dummy_bench(b: &mut test::Bencher) { } - "#, - ) - .build(); - - for _ in 0..2 { - p.cargo("bench").run(); - } -} - -#[cargo_test] -fn lib_bin_same_name() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - [[bin]] - name = "foo" - "#, - ) - .file( - "src/lib.rs", - " - #![feature(test)] - #[cfg(test)] - extern crate test; - #[bench] fn lib_bench(_b: &mut test::Bencher) {} - ", - ) - .file( - "src/main.rs", - " - #![feature(test)] - #[allow(unused_extern_crates)] - extern crate foo; - #[cfg(test)] - extern crate test; - - #[bench] - fn bin_bench(_b: &mut test::Bencher) {} - ", - ) - .build(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE]) -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains_n("test [..] ... bench: [..]", 2) - .run(); -} - -#[cargo_test] -fn lib_with_standard_name() { - if !is_nightly() { - return; - } - - let p = project() - .file("Cargo.toml", &basic_manifest("syntax", "0.0.1")) - .file( - "src/lib.rs", - " - #![feature(test)] - #[cfg(test)] - extern crate test; - - /// ``` - /// syntax::foo(); - /// ``` - pub fn foo() {} - - #[bench] - fn foo_bench(_b: &mut test::Bencher) {} - ", - ) - .file( - "benches/bench.rs", - " - #![feature(test)] - extern crate syntax; - extern crate test; - - #[bench] - fn bench(_b: &mut test::Bencher) { syntax::foo() } - ", - ) - .build(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] syntax v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/syntax-[..][EXE]) -[RUNNING] [..] (target/release/deps/bench-[..][EXE])", - ) - .with_stdout_contains("test foo_bench ... bench: [..]") - .with_stdout_contains("test bench ... bench: [..]") - .run(); -} - -#[cargo_test] -fn lib_with_standard_name2() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - name = "syntax" - bench = false - doctest = false - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file( - "src/main.rs", - " - #![feature(test)] - #[cfg(test)] - extern crate syntax; - #[cfg(test)] - extern crate test; - - fn main() {} - - #[bench] - fn bench(_b: &mut test::Bencher) { syntax::foo() } - ", - ) - .build(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] syntax v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/syntax-[..][EXE])", - ) - .with_stdout_contains("test bench ... 
bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_dylib() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate_type = ["dylib"] - - [dependencies.bar] - path = "bar" - "#, - ) - .file( - "src/lib.rs", - r#" - #![feature(test)] - extern crate bar as the_bar; - #[cfg(test)] - extern crate test; - - pub fn bar() { the_bar::baz(); } - - #[bench] - fn foo(_b: &mut test::Bencher) {} - "#, - ) - .file( - "benches/bench.rs", - r#" - #![feature(test)] - extern crate foo as the_foo; - extern crate test; - - #[bench] - fn foo(_b: &mut test::Bencher) { the_foo::bar(); } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - crate_type = ["dylib"] - "#, - ) - .file("bar/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("bench -v") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[RUNNING] [..] -C opt-level=3 [..] -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] [..] -C opt-level=3 [..] -[RUNNING] [..] -C opt-level=3 [..] -[RUNNING] [..] -C opt-level=3 [..] -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench` -[RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`", - ) - .with_stdout_contains_n("test foo ... bench: [..]", 2) - .run(); - - p.root().move_into_the_past(); - p.cargo("bench -v") - .with_stderr( - "\ -[FRESH] bar v0.0.1 ([CWD]/bar) -[FRESH] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] `[..]target/release/deps/foo-[..][EXE] --bench` -[RUNNING] `[..]target/release/deps/bench-[..][EXE] --bench`", - ) - .with_stdout_contains_n("test foo ... bench: [..]", 2) - .run(); -} - -#[cargo_test] -fn bench_twice_with_build_cmd() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file("build.rs", "fn main() {}") - .file( - "src/lib.rs", - " - #![feature(test)] - #[cfg(test)] - extern crate test; - #[bench] - fn foo(_b: &mut test::Bencher) {} - ", - ) - .build(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test foo ... bench: [..]") - .run(); - - p.cargo("bench") - .with_stderr( - "[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test foo ... 
bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_with_examples() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "6.6.6" - authors = [] - - [[example]] - name = "teste1" - - [[bench]] - name = "testb1" - "#, - ) - .file( - "src/lib.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - #[cfg(test)] - use test::Bencher; - - pub fn f1() { - println!("f1"); - } - - pub fn f2() {} - - #[bench] - fn bench_bench1(_b: &mut Bencher) { - f2(); - } - "#, - ) - .file( - "benches/testb1.rs", - " - #![feature(test)] - extern crate foo; - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_bench2(_b: &mut Bencher) { - foo::f2(); - } - ", - ) - .file( - "examples/teste1.rs", - r#" - extern crate foo; - - fn main() { - println!("example1"); - foo::f1(); - } - "#, - ) - .build(); - - p.cargo("bench -v") - .with_stderr( - "\ -[COMPILING] foo v6.6.6 ([CWD]) -[RUNNING] `rustc [..]` -[RUNNING] `rustc [..]` -[RUNNING] `rustc [..]` -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] `[CWD]/target/release/deps/foo-[..][EXE] --bench` -[RUNNING] `[CWD]/target/release/deps/testb1-[..][EXE] --bench`", - ) - .with_stdout_contains("test bench_bench1 ... bench: [..]") - .with_stdout_contains("test bench_bench2 ... bench: [..]") - .run(); -} - -#[cargo_test] -fn test_a_bench() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - authors = [] - version = "0.1.0" - - [lib] - name = "foo" - test = false - doctest = false - - [[bench]] - name = "b" - test = true - "#, - ) - .file("src/lib.rs", "") - .file("benches/b.rs", "#[test] fn foo() {}") - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.1.0 ([..]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/b-[..][EXE])", - ) - .with_stdout_contains("test foo ... ok") - .run(); -} - -#[cargo_test] -fn test_bench_no_run() { - if !is_nightly() { - return; - } - - let p = project() - .file("src/lib.rs", "") - .file( - "benches/bbaz.rs", - r#" - #![feature(test)] - - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_baz(_: &mut Bencher) {} - "#, - ) - .build(); - - p.cargo("bench --no-run") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] bench [optimized] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn test_bench_no_fail_fast() { - if !is_nightly() { - return; - } - - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/foo.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - fn hello() -> &'static str { - "hello" - } - - pub fn main() { - println!("{}", hello()) - } - - #[bench] - fn bench_hello(_b: &mut test::Bencher) { - assert_eq!(hello(), "hello") - } - - #[bench] - fn bench_nope(_b: &mut test::Bencher) { - assert_eq!("nope", hello()) - } - "#, - ) - .build(); - - p.cargo("bench --no-fail-fast -- --test-threads=1") - .with_status(101) - .with_stderr_contains("[RUNNING] [..] (target/release/deps/foo-[..][EXE])") - .with_stdout_contains("running 2 tests") - .with_stderr_contains("[RUNNING] [..] 
(target/release/deps/foo-[..][EXE])") - .with_stdout_contains("test bench_hello [..]") - .with_stdout_contains("test bench_nope [..]") - .run(); -} - -#[cargo_test] -fn test_bench_multiple_packages() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - authors = [] - version = "0.1.0" - - [dependencies.bar] - path = "../bar" - - [dependencies.baz] - path = "../baz" - "#, - ) - .file("src/lib.rs", "") - .build(); - - let _bar = project() - .at("bar") - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - authors = [] - version = "0.1.0" - - [[bench]] - name = "bbar" - test = true - "#, - ) - .file("src/lib.rs", "") - .file( - "benches/bbar.rs", - r#" - #![feature(test)] - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_bar(_b: &mut Bencher) {} - "#, - ) - .build(); - - let _baz = project() - .at("baz") - .file( - "Cargo.toml", - r#" - [project] - name = "baz" - authors = [] - version = "0.1.0" - - [[bench]] - name = "bbaz" - test = true - "#, - ) - .file("src/lib.rs", "") - .file( - "benches/bbaz.rs", - r#" - #![feature(test)] - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_baz(_b: &mut Bencher) {} - "#, - ) - .build(); - - p.cargo("bench -p bar -p baz") - .with_stderr_contains("[RUNNING] [..] (target/release/deps/bbaz-[..][EXE])") - .with_stdout_contains("test bench_baz ... bench: [..]") - .with_stderr_contains("[RUNNING] [..] (target/release/deps/bbar-[..][EXE])") - .with_stdout_contains("test bench_bar ... bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_all_workspace() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = "bar" } - - [workspace] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "benches/foo.rs", - r#" - #![feature(test)] - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_foo(_: &mut Bencher) -> () { () } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file( - "bar/benches/bar.rs", - r#" - #![feature(test)] - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_bar(_: &mut Bencher) -> () { () } - "#, - ) - .build(); - - p.cargo("bench --workspace") - .with_stderr_contains("[RUNNING] [..] (target/release/deps/bar-[..][EXE])") - .with_stdout_contains("test bench_bar ... bench: [..]") - .with_stderr_contains("[RUNNING] [..] (target/release/deps/foo-[..][EXE])") - .with_stdout_contains("test bench_foo ... bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_all_exclude() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file( - "bar/src/lib.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - - #[bench] - pub fn bar(b: &mut test::Bencher) { - b.iter(|| {}); - } - "#, - ) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file( - "baz/src/lib.rs", - "#[test] pub fn baz() { break_the_build(); }", - ) - .build(); - - p.cargo("bench --workspace --exclude baz") - .with_stdout_contains( - "\ -running 1 test -test bar ... bench: [..] 
ns/iter (+/- [..])", - ) - .run(); -} - -#[cargo_test] -fn bench_all_exclude_glob() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file( - "bar/src/lib.rs", - r#" - #![feature(test)] - #[cfg(test)] - extern crate test; - - #[bench] - pub fn bar(b: &mut test::Bencher) { - b.iter(|| {}); - } - "#, - ) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file( - "baz/src/lib.rs", - "#[test] pub fn baz() { break_the_build(); }", - ) - .build(); - - p.cargo("bench --workspace --exclude '*z'") - .with_stdout_contains( - "\ -running 1 test -test bar ... bench: [..] ns/iter (+/- [..])", - ) - .run(); -} - -#[cargo_test] -fn bench_all_virtual_manifest() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file( - "bar/benches/bar.rs", - r#" - #![feature(test)] - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_bar(_: &mut Bencher) -> () { () } - "#, - ) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .file( - "baz/benches/baz.rs", - r#" - #![feature(test)] - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_baz(_: &mut Bencher) -> () { () } - "#, - ) - .build(); - - // The order in which bar and baz are built is not guaranteed - p.cargo("bench --workspace") - .with_stderr_contains("[RUNNING] [..] (target/release/deps/baz-[..][EXE])") - .with_stdout_contains("test bench_baz ... bench: [..]") - .with_stderr_contains("[RUNNING] [..] (target/release/deps/bar-[..][EXE])") - .with_stdout_contains("test bench_bar ... bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_virtual_manifest_glob() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") - .file( - "bar/benches/bar.rs", - r#" - #![feature(test)] - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_bar(_: &mut Bencher) -> () { break_the_build(); } - "#, - ) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .file( - "baz/benches/baz.rs", - r#" - #![feature(test)] - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_baz(_: &mut Bencher) -> () { () } - "#, - ) - .build(); - - // The order in which bar and baz are built is not guaranteed - p.cargo("bench -p '*z'") - .with_stderr_contains("[RUNNING] [..] (target/release/deps/baz-[..][EXE])") - .with_stdout_contains("test bench_baz ... bench: [..]") - .with_stderr_does_not_contain("[RUNNING] [..] (target/release/deps/bar-[..][EXE])") - .with_stdout_does_not_contain("test bench_bar ... 
bench: [..]") - .run(); -} - -// https://github.com/rust-lang/cargo/issues/4287 -#[cargo_test] -fn legacy_bench_name() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [[bench]] - name = "bench" - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file( - "src/bench.rs", - r#" - #![feature(test)] - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_foo(_: &mut Bencher) -> () { () } - "#, - ) - .build(); - - p.cargo("bench") - .with_stderr_contains( - "\ -[WARNING] path `[..]src/bench.rs` was erroneously implicitly accepted for benchmark `bench`, -please set bench.path in Cargo.toml", - ) - .run(); -} - -#[cargo_test] -fn bench_virtual_manifest_all_implied() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn foo() {}") - .file( - "bar/benches/bar.rs", - r#" - #![feature(test)] - extern crate test; - use test::Bencher; - #[bench] - fn bench_bar(_: &mut Bencher) -> () { () } - "#, - ) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .file( - "baz/benches/baz.rs", - r#" - #![feature(test)] - extern crate test; - use test::Bencher; - #[bench] - fn bench_baz(_: &mut Bencher) -> () { () } - "#, - ) - .build(); - - // The order in which bar and baz are built is not guaranteed - - p.cargo("bench") - .with_stderr_contains("[RUNNING] [..] (target/release/deps/baz-[..][EXE])") - .with_stdout_contains("test bench_baz ... bench: [..]") - .with_stderr_contains("[RUNNING] [..] (target/release/deps/bar-[..][EXE])") - .with_stdout_contains("test bench_bar ... bench: [..]") - .run(); -} - -#[cargo_test] -fn json_artifact_includes_executable_for_benchmark() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "benches/benchmark.rs", - r#" - #![feature(test)] - extern crate test; - - use test::Bencher; - - #[bench] - fn bench_foo(_: &mut Bencher) -> () { () } - "#, - ) - .build(); - - p.cargo("bench --no-run --message-format=json") - .with_json( - r#" - { - "executable": "[..]/foo/target/release/deps/benchmark-[..][EXE]", - "features": [], - "filenames": "{...}", - "fresh": false, - "package_id": "foo 0.0.1 ([..])", - "manifest_path": "[..]", - "profile": "{...}", - "reason": "compiler-artifact", - "target": { - "crate_types": [ "bin" ], - "kind": [ "bench" ], - "doc": false, - "doctest": false, - "edition": "2015", - "name": "benchmark", - "src_path": "[..]/foo/benches/benchmark.rs", - "test": false - } - } - - {"reason": "build-finished", "success": true} - "#, - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/binary_name.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/binary_name.rs deleted file mode 100644 index 65a4f75b5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/binary_name.rs +++ /dev/null @@ -1,291 +0,0 @@ -use cargo_test_support::install::{ - assert_has_installed_exe, assert_has_not_installed_exe, cargo_home, -}; -use cargo_test_support::project; - -#[cargo_test] -fn gated() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - - [[bin]] - name = "foo" - filename = "007bar" - path = "src/main.rs" - "#, - ) - .file("src/main.rs", "fn main() { assert!(true) }") - .build(); - - // Run cargo build. 
- p.cargo("build") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains("[..]feature `different-binary-name` is required") - .run(); -} - -#[cargo_test] -// This test checks if: -// 1. The correct binary is produced -// 2. The deps file has the correct content -// 3. Fingerprinting works -// 4. `cargo clean` command works -fn binary_name1() { - // Create the project. - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["different-binary-name"] - - [project] - name = "foo" - version = "0.0.1" - - [[bin]] - name = "foo" - filename = "007bar" - path = "src/main.rs" - "#, - ) - .file("src/main.rs", "fn main() { assert!(true) }") - .build(); - - // Run cargo build. - p.cargo("build").masquerade_as_nightly_cargo().run(); - - // Check the name of the binary that cargo has generated. - // A binary with the name of the crate should NOT be created. - let foo_path = p.bin("foo"); - assert!(!foo_path.is_file()); - // A binary with the name provided in `filename` parameter should be created. - let bar_path = p.bin("007bar"); - assert!(bar_path.is_file()); - - // Check if deps file exists. - let deps_path = p.bin("007bar").with_extension("d"); - assert!(deps_path.is_file(), "{:?}", bar_path); - - let depinfo = p.read_file(deps_path.to_str().unwrap()); - - // Prepare what content we expect to be present in deps file. - let deps_exp = format!( - "{}: {}", - p.bin("007bar").to_str().unwrap(), - p.root().join("src").join("main.rs").to_str().unwrap() - ); - - // Compare actual deps content with expected deps content. - assert!( - depinfo.lines().any(|line| line == deps_exp), - "Content of `{}` is incorrect", - deps_path.to_string_lossy() - ); - - // Run cargo second time, to verify fingerprint. - p.cargo("build -p foo -v") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[FRESH] foo [..] -[FINISHED] [..] -", - ) - .run(); - - // Run cargo clean. - p.cargo("clean -p foo").masquerade_as_nightly_cargo().run(); - - // Check if the appropriate file was removed. - assert!( - !bar_path.is_file(), - "`cargo clean` did not remove the correct files" - ); -} - -#[cargo_test] -// This test checks if: -// 1. Check `cargo run` -// 2. Check `cargo test` -// 3. Check `cargo install/uninstall` -fn binary_name2() { - // Create the project. - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["different-binary-name"] - - [project] - name = "foo" - version = "0.0.1" - - [[bin]] - name = "foo" - filename = "007bar" - "#, - ) - .file( - "src/main.rs", - r#" - fn hello(name: &str) -> String { - format!("Hello, {}!", name) - } - - fn main() { - println!("{}", hello("crabs")); - } - - #[cfg(test)] - mod tests { - use super::*; - - #[test] - fn check_crabs() { - assert_eq!(hello("crabs"), "Hello, crabs!"); - } - } - "#, - ) - .build(); - - // Run cargo build. - p.cargo("build").masquerade_as_nightly_cargo().run(); - - // Check the name of the binary that cargo has generated. - // A binary with the name of the crate should NOT be created. - let foo_path = p.bin("foo"); - assert!(!foo_path.is_file()); - // A binary with the name provided in `filename` parameter should be created. - let bar_path = p.bin("007bar"); - assert!(bar_path.is_file()); - - // Check if `cargo test` works - p.cargo("test") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test tests::check_crabs ... 
ok") - .run(); - - // Check if `cargo run` is able to execute the binary - p.cargo("run") - .masquerade_as_nightly_cargo() - .with_stdout("Hello, crabs!") - .run(); - - p.cargo("install").masquerade_as_nightly_cargo().run(); - - assert_has_installed_exe(cargo_home(), "007bar"); - - p.cargo("uninstall") - .with_stderr("[REMOVING] [ROOT]/home/.cargo/bin/007bar[EXE]") - .masquerade_as_nightly_cargo() - .run(); - - assert_has_not_installed_exe(cargo_home(), "007bar"); -} - -#[cargo_test] -fn check_env_vars() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["different-binary-name"] - - [project] - name = "foo" - version = "0.0.1" - - [[bin]] - name = "foo" - filename = "007bar" - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - println!("{}", option_env!("CARGO_BIN_NAME").unwrap()); - } - "#, - ) - .file( - "tests/integration.rs", - r#" - #[test] - fn check_env_vars2() { - let value = option_env!("CARGO_BIN_EXE_007bar").expect("Could not find environment variable."); - assert!(value.contains("007bar")); - } - "# - ) - .build(); - - // Run cargo build. - p.cargo("build").masquerade_as_nightly_cargo().run(); - p.cargo("run") - .masquerade_as_nightly_cargo() - .with_stdout("007bar") - .run(); - p.cargo("test") - .masquerade_as_nightly_cargo() - .with_status(0) - .run(); -} - -#[cargo_test] -fn check_msg_format_json() { - // Create the project. - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["different-binary-name"] - - [project] - name = "foo" - version = "0.0.1" - - [[bin]] - name = "foo" - filename = "007bar" - path = "src/main.rs" - "#, - ) - .file("src/main.rs", "fn main() { assert!(true) }") - .build(); - - let output = r#" -{ - "reason": "compiler-artifact", - "package_id": "foo 0.0.1 [..]", - "manifest_path": "[CWD]/Cargo.toml", - "target": "{...}", - "profile": "{...}", - "features": [], - "filenames": "{...}", - "executable": "[ROOT]/foo/target/debug/007bar[EXE]", - "fresh": false -} - -{"reason":"build-finished", "success":true} -"#; - - // Run cargo build. - p.cargo("build --message-format=json") - .masquerade_as_nightly_cargo() - .with_json(output) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build.rs deleted file mode 100644 index 1d623821d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build.rs +++ /dev/null @@ -1,5910 +0,0 @@ -//! Tests for the `cargo build` command. 
- -use cargo::{ - core::compiler::CompileMode, - core::{Shell, Workspace}, - ops::CompileOptions, - Config, -}; -use cargo_test_support::compare; -use cargo_test_support::paths::{root, CargoPathExt}; -use cargo_test_support::registry::Package; -use cargo_test_support::tools; -use cargo_test_support::{ - basic_bin_manifest, basic_lib_manifest, basic_manifest, cargo_exe, git, is_nightly, main_file, - paths, process, project, rustc_host, sleep_ms, symlink_supported, t, Execs, ProjectBuilder, -}; -use cargo_util::paths::dylib_path_envvar; -use std::env; -use std::fs; -use std::io::Read; -use std::process::Stdio; - -#[cargo_test] -fn cargo_compile_simple() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("i am foo\n").run(); -} - -#[cargo_test] -fn cargo_fail_with_no_stderr() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &String::from("refusal")) - .build(); - p.cargo("build --message-format=json") - .with_status(101) - .with_stderr_does_not_contain("--- stderr") - .run(); -} - -/// Checks that the `CARGO_INCREMENTAL` environment variable results in -/// `rustc` getting `-C incremental` passed to it. -#[cargo_test] -fn cargo_compile_incremental() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("build -v") - .env("CARGO_INCREMENTAL", "1") - .with_stderr_contains( - "[RUNNING] `rustc [..] -C incremental=[..]/target/debug/incremental[..]`\n", - ) - .run(); - - p.cargo("test -v") - .env("CARGO_INCREMENTAL", "1") - .with_stderr_contains( - "[RUNNING] `rustc [..] 
-C incremental=[..]/target/debug/incremental[..]`\n", - ) - .run(); -} - -#[cargo_test] -fn incremental_profile() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [profile.dev] - incremental = false - - [profile.release] - incremental = true - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v") - .env_remove("CARGO_INCREMENTAL") - .with_stderr_does_not_contain("[..]C incremental=[..]") - .run(); - - p.cargo("build -v") - .env("CARGO_INCREMENTAL", "1") - .with_stderr_contains("[..]C incremental=[..]") - .run(); - - p.cargo("build --release -v") - .env_remove("CARGO_INCREMENTAL") - .with_stderr_contains("[..]C incremental=[..]") - .run(); - - p.cargo("build --release -v") - .env("CARGO_INCREMENTAL", "0") - .with_stderr_does_not_contain("[..]C incremental=[..]") - .run(); -} - -#[cargo_test] -fn incremental_config() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [build] - incremental = false - "#, - ) - .build(); - - p.cargo("build -v") - .env_remove("CARGO_INCREMENTAL") - .with_stderr_does_not_contain("[..]C incremental=[..]") - .run(); - - p.cargo("build -v") - .env("CARGO_INCREMENTAL", "1") - .with_stderr_contains("[..]C incremental=[..]") - .run(); -} - -#[cargo_test] -fn cargo_compile_with_workspace_excluded() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - p.cargo("build --workspace --exclude foo") - .with_stderr_does_not_contain("[..]virtual[..]") - .with_stderr_contains("[..]no packages to compile") - .with_status(101) - .run(); -} - -#[cargo_test] -fn cargo_compile_manifest_path() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("build --manifest-path foo/Cargo.toml") - .cwd(p.root().parent().unwrap()) - .run(); - assert!(p.bin("foo").is_file()); -} - -#[cargo_test] -fn cargo_compile_with_invalid_manifest() { - let p = project().file("Cargo.toml", "").build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - virtual manifests must be configured with [workspace] -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_invalid_manifest2() { - let p = project() - .file( - "Cargo.toml", - " - [project] - foo = bar - ", - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - could not parse input as TOML - -Caused by: - invalid TOML value, did you mean to use a quoted string? at line 3 column 23 -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_invalid_manifest3() { - let p = project().file("src/Cargo.toml", "a = bar").build(); - - p.cargo("build --manifest-path src/Cargo.toml") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - could not parse input as TOML - -Caused by: - invalid TOML value, did you mean to use a quoted string? 
at line 1 column 5 -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_duplicate_build_targets() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "main" - path = "src/main.rs" - crate-type = ["dylib"] - - [dependencies] - "#, - ) - .file("src/main.rs", "#![allow(warnings)] fn main() {}") - .build(); - - p.cargo("build") - .with_stderr( - "\ -warning: file found to be present in multiple build targets: [..]main.rs -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_invalid_version() { - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0")) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - unexpected end of input while parsing minor version number for key `package.version` -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_empty_package_name() { - let p = project() - .file("Cargo.toml", &basic_manifest("", "0.0.0")) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - package name cannot be an empty string -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_invalid_package_name() { - let p = project() - .file("Cargo.toml", &basic_manifest("foo::bar", "0.0.0")) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - invalid character `:` in package name: `foo::bar`, [..] -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_invalid_bin_target_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [[bin]] - name = "" - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - binary target names cannot be empty -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_forbidden_bin_target_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [[bin]] - name = "build" - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - the binary target name `build` is forbidden, it conflicts with with cargo's build directory names -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_bin_and_crate_type() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [[bin]] - name = "the_foo_bin" - path = "src/foo.rs" - crate-type = ["cdylib", "rlib"] - "#, - ) - .file("src/foo.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - the target `the_foo_bin` is a binary and can't have any crate-types set \ -(currently \"cdylib, rlib\")", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_api_exposes_artifact_paths() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [[bin]] - name = "the_foo_bin" - path = "src/bin.rs" - - [lib] - name = "the_foo_lib" - path = "src/foo.rs" - crate-type = ["cdylib", "rlib"] - "#, - ) - .file("src/foo.rs", "pub fn bar() {}") - .file("src/bin.rs", "pub fn main() {}") - .build(); - - let shell = 
Shell::from_write(Box::new(Vec::new())); - let config = Config::new(shell, env::current_dir().unwrap(), paths::home()); - let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap(); - let compile_options = CompileOptions::new(ws.config(), CompileMode::Build).unwrap(); - - let result = cargo::ops::compile(&ws, &compile_options).unwrap(); - - assert_eq!(1, result.binaries.len()); - assert!(result.binaries[0].path.exists()); - assert!(result.binaries[0] - .path - .to_str() - .unwrap() - .contains("the_foo_bin")); - - assert_eq!(1, result.cdylibs.len()); - // The exact library path varies by platform, but should certainly exist at least - assert!(result.cdylibs[0].path.exists()); - assert!(result.cdylibs[0] - .path - .to_str() - .unwrap() - .contains("the_foo_lib")); -} - -#[cargo_test] -fn cargo_compile_with_bin_and_proc() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [[bin]] - name = "the_foo_bin" - path = "src/foo.rs" - proc-macro = true - "#, - ) - .file("src/foo.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - the target `the_foo_bin` is a binary and can't have `proc-macro` set `true`", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_invalid_lib_target_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [lib] - name = "" - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - library target names cannot be empty -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_invalid_non_numeric_dep_version() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - crossbeam = "y" - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[CWD]/Cargo.toml` - -Caused by: - failed to parse the version requirement `y` for dependency `crossbeam` - -Caused by: - unexpected character 'y' while parsing major version number -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_without_manifest() { - let p = project().no_manifest().build(); - - p.cargo("build") - .with_status(101) - .with_stderr("[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory") - .run(); -} - -#[cargo_test] -#[cfg(target_os = "linux")] -fn cargo_compile_with_lowercase_cargo_toml() { - let p = project() - .no_manifest() - .file("cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/lib.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory, \ - but found cargo.toml please try to rename it to Cargo.toml", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_invalid_code() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", "invalid rust code!") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains("[ERROR] could not compile `foo` due to previous error\n") - .run(); - assert!(p.root().join("Cargo.lock").is_file()); -} - -#[cargo_test] -fn cargo_compile_with_invalid_code_in_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - 
[dependencies.baz] - path = "../baz" - "#, - ) - .file("src/main.rs", "invalid rust code!") - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "invalid rust code!") - .build(); - let _baz = project() - .at("baz") - .file("Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("src/lib.rs", "invalid rust code!") - .build(); - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..]invalid rust code[..]") - .with_stderr_contains("[ERROR] could not compile [..]") - .run(); -} - -#[cargo_test] -fn cargo_compile_with_warnings_in_the_root_package() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", "fn main() {} fn dead() {}") - .build(); - - p.cargo("build") - .with_stderr_contains("[..]function is never used: `dead`[..]") - .run(); -} - -#[cargo_test] -fn cargo_compile_with_warnings_in_a_dep_package() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - - [[bin]] - - name = "foo" - "#, - ) - .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file( - "bar/src/bar.rs", - r#" - pub fn gimme() -> &'static str { - "test passed" - } - - fn dead() {} - "#, - ) - .build(); - - p.cargo("build") - .with_stderr_contains("[..]function is never used: `dead`[..]") - .run(); - - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("test passed\n").run(); -} - -#[cargo_test] -fn cargo_compile_with_nested_deps_inferred() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = 'bar' - - [[bin]] - name = "foo" - "#, - ) - .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file( - "bar/Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.baz] - path = "../baz" - "#, - ) - .file( - "bar/src/lib.rs", - r#" - extern crate baz; - - pub fn gimme() -> String { - baz::gimme() - } - "#, - ) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0")) - .file( - "baz/src/lib.rs", - r#" - pub fn gimme() -> String { - "test passed".to_string() - } - "#, - ) - .build(); - - p.cargo("build").run(); - - assert!(p.bin("foo").is_file()); - assert!(!p.bin("libbar.rlib").is_file()); - assert!(!p.bin("libbaz.rlib").is_file()); - - p.process(&p.bin("foo")).with_stdout("test passed\n").run(); -} - -#[cargo_test] -fn cargo_compile_with_nested_deps_correct_bin() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - - [[bin]] - name = "foo" - "#, - ) - .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file( - "bar/Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.baz] - path = "../baz" - "#, - ) - .file( - "bar/src/lib.rs", - r#" - extern crate baz; - - pub fn gimme() -> String { - baz::gimme() - } - "#, - ) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0")) - .file( - "baz/src/lib.rs", - r#" - pub fn gimme() -> String { - "test passed".to_string() - } - "#, - ) - .build(); - - p.cargo("build").run(); - - assert!(p.bin("foo").is_file()); - assert!(!p.bin("libbar.rlib").is_file()); - 
assert!(!p.bin("libbaz.rlib").is_file()); - - p.process(&p.bin("foo")).with_stdout("test passed\n").run(); -} - -#[cargo_test] -fn cargo_compile_with_nested_deps_shorthand() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file( - "bar/Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.baz] - path = "../baz" - - [lib] - - name = "bar" - "#, - ) - .file( - "bar/src/bar.rs", - r#" - extern crate baz; - - pub fn gimme() -> String { - baz::gimme() - } - "#, - ) - .file("baz/Cargo.toml", &basic_lib_manifest("baz")) - .file( - "baz/src/baz.rs", - r#" - pub fn gimme() -> String { - "test passed".to_string() - } - "#, - ) - .build(); - - p.cargo("build").run(); - - assert!(p.bin("foo").is_file()); - assert!(!p.bin("libbar.rlib").is_file()); - assert!(!p.bin("libbaz.rlib").is_file()); - - p.process(&p.bin("foo")).with_stdout("test passed\n").run(); -} - -#[cargo_test] -fn cargo_compile_with_nested_deps_longhand() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - version = "0.5.0" - - [[bin]] - - name = "foo" - "#, - ) - .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file( - "bar/Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.baz] - path = "../baz" - version = "0.5.0" - - [lib] - - name = "bar" - "#, - ) - .file( - "bar/src/bar.rs", - r#" - extern crate baz; - - pub fn gimme() -> String { - baz::gimme() - } - "#, - ) - .file("baz/Cargo.toml", &basic_lib_manifest("baz")) - .file( - "baz/src/baz.rs", - r#" - pub fn gimme() -> String { - "test passed".to_string() - } - "#, - ) - .build(); - - p.cargo("build").run(); - - assert!(p.bin("foo").is_file()); - assert!(!p.bin("libbar.rlib").is_file()); - assert!(!p.bin("libbaz.rlib").is_file()); - - p.process(&p.bin("foo")).with_stdout("test passed\n").run(); -} - -// Check that Cargo gives a sensible error if a dependency can't be found -// because of a name mismatch. 
-#[cargo_test] -fn cargo_compile_with_dep_name_mismatch() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - - name = "foo" - version = "0.0.1" - authors = ["wycats@example.com"] - - [[bin]] - - name = "foo" - - [dependencies.notquitebar] - - path = "bar" - "#, - ) - .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"])) - .file("bar/Cargo.toml", &basic_bin_manifest("bar")) - .file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[])) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: no matching package named `notquitebar` found -location searched: [CWD]/bar -required by package `foo v0.0.1 ([CWD])` -", - ) - .run(); -} - -// Ensure that renamed deps have a valid name -#[cargo_test] -fn cargo_compile_with_invalid_dep_rename() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "buggin" - version = "0.1.0" - - [dependencies] - "haha this isn't a valid name ๐Ÿ›" = { package = "libc", version = "0.1" } - "#, - ) - .file("src/main.rs", &main_file(r#""What's good?""#, &[])) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - invalid character ` ` in dependency name: `haha this isn't a valid name ๐Ÿ›`, characters must be Unicode XID characters (numbers, `-`, `_`, or most letters) -", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_filename() { - let p = project() - .file("src/lib.rs", "") - .file( - "src/bin/a.rs", - r#" - extern crate foo; - fn main() { println!("hello a.rs"); } - "#, - ) - .file("examples/a.rs", r#"fn main() { println!("example"); }"#) - .build(); - - p.cargo("build --bin bin.rs") - .with_status(101) - .with_stderr("[ERROR] no bin target named `bin.rs`") - .run(); - - p.cargo("build --bin a.rs") - .with_status(101) - .with_stderr( - "\ -[ERROR] no bin target named `a.rs` - -Did you mean `a`?", - ) - .run(); - - p.cargo("build --example example.rs") - .with_status(101) - .with_stderr("[ERROR] no example target named `example.rs`") - .run(); - - p.cargo("build --example a.rs") - .with_status(101) - .with_stderr( - "\ -[ERROR] no example target named `a.rs` - -Did you mean `a`?", - ) - .run(); -} - -#[cargo_test] -fn incompatible_dependencies() { - Package::new("bad", "0.1.0").publish(); - Package::new("bad", "1.0.0").publish(); - Package::new("bad", "1.0.1").publish(); - Package::new("bad", "1.0.2").publish(); - Package::new("bar", "0.1.0").dep("bad", "0.1.0").publish(); - Package::new("baz", "0.1.1").dep("bad", "=1.0.0").publish(); - Package::new("baz", "0.1.0").dep("bad", "=1.0.0").publish(); - Package::new("qux", "0.1.2").dep("bad", ">=1.0.1").publish(); - Package::new("qux", "0.1.1").dep("bad", ">=1.0.1").publish(); - Package::new("qux", "0.1.0").dep("bad", ">=1.0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = "0.1.0" - baz = "0.1.0" - qux = "0.1.0" - "#, - ) - .file("src/main.rs", "fn main(){}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - "\ -error: failed to select a version for `bad`. - ... required by package `qux v0.1.0` - ... which satisfies dependency `qux = \"^0.1.0\"` of package `foo v0.0.1 ([..])` -versions that meet the requirements `>=1.0.1` are: 1.0.2, 1.0.1 - -all possible versions conflict with previously selected packages. - - previously selected package `bad v1.0.0` - ... which satisfies dependency `bad = \"=1.0.0\"` of package `baz v0.1.0` - ... 
which satisfies dependency `baz = \"^0.1.0\"` of package `foo v0.0.1 ([..])` - -failed to select a version for `bad` which could resolve this conflict", - ) - .run(); -} - -#[cargo_test] -fn incompatible_dependencies_with_multi_semver() { - Package::new("bad", "1.0.0").publish(); - Package::new("bad", "1.0.1").publish(); - Package::new("bad", "2.0.0").publish(); - Package::new("bad", "2.0.1").publish(); - Package::new("bar", "0.1.0").dep("bad", "=1.0.0").publish(); - Package::new("baz", "0.1.0").dep("bad", ">=2.0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = "0.1.0" - baz = "0.1.0" - bad = ">=1.0.1, <=2.0.0" - "#, - ) - .file("src/main.rs", "fn main(){}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - "\ -error: failed to select a version for `bad`. - ... required by package `foo v0.0.1 ([..])` -versions that meet the requirements `>=1.0.1, <=2.0.0` are: 2.0.0, 1.0.1 - -all possible versions conflict with previously selected packages. - - previously selected package `bad v2.0.1` - ... which satisfies dependency `bad = \">=2.0.1\"` of package `baz v0.1.0` - ... which satisfies dependency `baz = \"^0.1.0\"` of package `foo v0.0.1 ([..])` - - previously selected package `bad v1.0.0` - ... which satisfies dependency `bad = \"=1.0.0\"` of package `bar v0.1.0` - ... which satisfies dependency `bar = \"^0.1.0\"` of package `foo v0.0.1 ([..])` - -failed to select a version for `bad` which could resolve this conflict", - ) - .run(); -} - -#[cargo_test] -fn compile_path_dep_then_change_version() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.0.2")); - - p.cargo("build").run(); -} - -#[cargo_test] -fn ignores_carriage_return_in_lockfile() { - let p = project() - .file("src/main.rs", "mod a; fn main() {}") - .file("src/a.rs", "") - .build(); - - p.cargo("build").run(); - - let lock = p.read_lockfile(); - p.change_file("Cargo.lock", &lock.replace("\n", "\r\n")); - p.cargo("build").run(); -} - -#[cargo_test] -fn cargo_default_env_metadata_env_var() { - // Ensure that path dep + dylib + env_var get metadata - // (even though path_dep + dylib should not) - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/lib.rs", "// hi") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - crate_type = ["dylib"] - "#, - ) - .file("bar/src/lib.rs", "// hello") - .build(); - - // No metadata on libbar since it's a dylib path dependency - p.cargo("build -v") - .with_stderr(&format!( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type dylib \ - --emit=[..]link \ - -C prefer-dynamic[..]-C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps` -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 \ - -C metadata=[..] \ - -C extra-filename=[..] \ - --out-dir [..] 
\ - -L dependency=[CWD]/target/debug/deps \ - --extern bar=[CWD]/target/debug/deps/{prefix}bar{suffix}` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - prefix = env::consts::DLL_PREFIX, - suffix = env::consts::DLL_SUFFIX, - )) - .run(); - - p.cargo("clean").run(); - - // If you set the env-var, then we expect metadata on libbar - p.cargo("build -v") - .env("__CARGO_DEFAULT_LIB_METADATA", "stable") - .with_stderr(&format!( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type dylib \ - --emit=[..]link \ - -C prefer-dynamic[..]-C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps` -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 \ - -C metadata=[..] \ - -C extra-filename=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps \ - --extern bar=[CWD]/target/debug/deps/{prefix}bar-[..]{suffix}` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - prefix = env::consts::DLL_PREFIX, - suffix = env::consts::DLL_SUFFIX, - )) - .run(); -} - -#[cargo_test] -fn crate_env_vars() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.1-alpha.1" - description = "This is foo" - homepage = "https://example.com" - repository = "https://example.com/repo.git" - authors = ["wycats@example.com"] - license = "MIT OR Apache-2.0" - license-file = "license.txt" - - [[bin]] - name = "foo-bar" - path = "src/main.rs" - "#, - ) - .file( - "src/main.rs", - r#" - extern crate foo; - - - static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR"); - static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR"); - static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH"); - static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE"); - static VERSION: &'static str = env!("CARGO_PKG_VERSION"); - static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR"); - static PKG_NAME: &'static str = env!("CARGO_PKG_NAME"); - static HOMEPAGE: &'static str = env!("CARGO_PKG_HOMEPAGE"); - static REPOSITORY: &'static str = env!("CARGO_PKG_REPOSITORY"); - static LICENSE: &'static str = env!("CARGO_PKG_LICENSE"); - static LICENSE_FILE: &'static str = env!("CARGO_PKG_LICENSE_FILE"); - static DESCRIPTION: &'static str = env!("CARGO_PKG_DESCRIPTION"); - static BIN_NAME: &'static str = env!("CARGO_BIN_NAME"); - static CRATE_NAME: &'static str = env!("CARGO_CRATE_NAME"); - - - fn main() { - let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR, - VERSION_MINOR, VERSION_PATCH, VERSION_PRE, - CARGO_MANIFEST_DIR); - assert_eq!(s, foo::version()); - println!("{}", s); - assert_eq!("foo", PKG_NAME); - assert_eq!("foo-bar", BIN_NAME); - assert_eq!("foo_bar", CRATE_NAME); - assert_eq!("https://example.com", HOMEPAGE); - assert_eq!("https://example.com/repo.git", REPOSITORY); - assert_eq!("MIT OR Apache-2.0", LICENSE); - assert_eq!("license.txt", LICENSE_FILE); - assert_eq!("This is foo", DESCRIPTION); - let s = format!("{}.{}.{}-{}", VERSION_MAJOR, - VERSION_MINOR, VERSION_PATCH, VERSION_PRE); - assert_eq!(s, VERSION); - - // Verify CARGO_TARGET_TMPDIR isn't set for bins - assert!(option_env!("CARGO_TARGET_TMPDIR").is_none()); - } - "#, - ) - .file( - "src/lib.rs", - r#" - use std::env; - use std::path::PathBuf; - - pub fn version() -> String { - format!("{}-{}-{} @ {} in {}", - env!("CARGO_PKG_VERSION_MAJOR"), - env!("CARGO_PKG_VERSION_MINOR"), - 
env!("CARGO_PKG_VERSION_PATCH"), - env!("CARGO_PKG_VERSION_PRE"), - env!("CARGO_MANIFEST_DIR")) - } - - pub fn check_no_int_test_env() { - env::var("CARGO_TARGET_DIR").unwrap_err(); - } - - pub fn check_tmpdir(tmp: Option<&'static str>) { - let tmpdir: PathBuf = tmp.unwrap().into(); - - let exe: PathBuf = env::current_exe().unwrap().into(); - let mut expected: PathBuf = exe.parent().unwrap() - .parent().unwrap() - .parent().unwrap() - .into(); - expected.push("tmp"); - assert_eq!(tmpdir, expected); - - // Check that CARGO_TARGET_TMPDIR isn't set for lib code - assert!(option_env!("CARGO_TARGET_TMPDIR").is_none()); - env::var("CARGO_TARGET_TMPDIR").unwrap_err(); - } - - #[test] - fn env() { - // Check that CARGO_TARGET_TMPDIR isn't set for unit tests - assert!(option_env!("CARGO_TARGET_TMPDIR").is_none()); - env::var("CARGO_TARGET_TMPDIR").unwrap_err(); - } - "#, - ) - .file( - "tests/env.rs", - r#" - #[test] - fn env() { - foo::check_tmpdir(option_env!("CARGO_TARGET_TMPDIR")); - } - "#, - ); - - let p = if is_nightly() { - p.file( - "benches/env.rs", - r#" - #![feature(test)] - extern crate test; - use test::Bencher; - - #[bench] - fn env(_: &mut Bencher) { - foo::check_tmpdir(option_env!("CARGO_TARGET_TMPDIR")); - } - "#, - ) - .build() - } else { - p.build() - }; - - println!("build"); - p.cargo("build -v").run(); - - println!("bin"); - p.process(&p.bin("foo-bar")) - .with_stdout("0-5-1 @ alpha.1 in [CWD]") - .run(); - - println!("test"); - p.cargo("test -v").run(); - - if is_nightly() { - println!("bench"); - p.cargo("bench -v").run(); - } -} - -#[cargo_test] -fn crate_authors_env_vars() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.1-alpha.1" - authors = ["wycats@example.com", "neikos@example.com"] - "#, - ) - .file( - "src/main.rs", - r#" - extern crate foo; - - static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS"); - - fn main() { - let s = "wycats@example.com:neikos@example.com"; - assert_eq!(AUTHORS, foo::authors()); - println!("{}", AUTHORS); - assert_eq!(s, AUTHORS); - } - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn authors() -> String { - format!("{}", env!("CARGO_PKG_AUTHORS")) - } - "#, - ) - .build(); - - println!("build"); - p.cargo("build -v").run(); - - println!("bin"); - p.process(&p.bin("foo")) - .with_stdout("wycats@example.com:neikos@example.com") - .run(); - - println!("test"); - p.cargo("test -v").run(); -} - -#[cargo_test] -fn vv_prints_rustc_env_vars() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = ["escape='\"@example.com"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - let mut b = p.cargo("build -vv"); - - if cfg!(windows) { - b.with_stderr_contains( - "[RUNNING] `[..]set CARGO_PKG_NAME=foo&& [..]rustc [..]`" - ).with_stderr_contains( - r#"[RUNNING] `[..]set CARGO_PKG_AUTHORS="escape='\"@example.com"&& [..]rustc [..]`"# - ) - } else { - b.with_stderr_contains("[RUNNING] `[..]CARGO_PKG_NAME=foo [..]rustc [..]`") - .with_stderr_contains( - r#"[RUNNING] `[..]CARGO_PKG_AUTHORS='escape='\''"@example.com' [..]rustc [..]`"#, - ) - }; - - b.run(); -} - -// The tester may already have LD_LIBRARY_PATH=::/foo/bar which leads to a false positive error -fn setenv_for_removing_empty_component(mut execs: Execs) -> Execs { - let v = dylib_path_envvar(); - if let Ok(search_path) = env::var(v) { - let new_search_path = - env::join_paths(env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty())) - .expect("join_paths"); - 
execs.env(v, new_search_path); // build_command() will override LD_LIBRARY_PATH accordingly - } - execs -} - -// Regression test for #4277 -#[cargo_test] -fn crate_library_path_env_var() { - let p = project() - .file( - "src/main.rs", - &format!( - r#" - fn main() {{ - let search_path = env!("{}"); - let paths = std::env::split_paths(&search_path).collect::>(); - assert!(!paths.contains(&"".into())); - }} - "#, - dylib_path_envvar() - ), - ) - .build(); - - setenv_for_removing_empty_component(p.cargo("run")).run(); -} - -// Regression test for #4277 -#[cargo_test] -fn build_with_fake_libc_not_loading() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .file("libc.so.6", r#""#) - .build(); - - setenv_for_removing_empty_component(p.cargo("build")).run(); -} - -// this is testing that src/.rs still works (for now) -#[cargo_test] -fn many_crate_types_old_style_lib_location() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "foo" - crate_type = ["rlib", "dylib"] - "#, - ) - .file("src/foo.rs", "pub fn foo() {}") - .build(); - p.cargo("build") - .with_stderr_contains( - "\ -[WARNING] path `[..]src/foo.rs` was erroneously implicitly accepted for library `foo`, -please rename the file to `src/lib.rs` or set lib.path in Cargo.toml", - ) - .run(); - - assert!(p.root().join("target/debug/libfoo.rlib").is_file()); - let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); - assert!(p.root().join("target/debug").join(&fname).is_file()); -} - -#[cargo_test] -fn many_crate_types_correct() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - - name = "foo" - crate_type = ["rlib", "dylib"] - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .build(); - p.cargo("build").run(); - - assert!(p.root().join("target/debug/libfoo.rlib").is_file()); - let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); - assert!(p.root().join("target/debug").join(&fname).is_file()); -} - -#[cargo_test] -fn self_dependency() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [dependencies.test] - - path = "." - - [lib] - name = "test" - path = "src/test.rs" - "#, - ) - .file("src/test.rs", "fn main() {}") - .build(); - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] cyclic package dependency: package `test v0.0.0 ([CWD])` depends on itself. Cycle: -package `test v0.0.0 ([CWD])` - ... which satisfies path dependency `test` of package `test v0.0.0 ([..])`", - ) - .run(); -} - -#[cargo_test] -/// Make sure broken and loop symlinks don't break the build -/// -/// This test requires you to be able to make symlinks. -/// For windows, this may require you to enable developer mode. -fn ignore_broken_symlinks() { - if !symlink_supported() { - return; - } - - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .symlink("Notafile", "bar") - // To hit the symlink directory, we need a build script - // to trigger a full scan of package files. 
- .file("build.rs", &main_file(r#""build script""#, &[])) - .symlink_dir("a/b", "a/b/c/d/foo") - .build(); - - p.cargo("build") - .with_stderr_contains( - "[WARNING] File system loop found: [..]/a/b/c/d/foo points to an ancestor [..]/a/b", - ) - .run(); - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("i am foo\n").run(); -} - -#[cargo_test] -fn missing_lib_and_bin() { - let p = project().build(); - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]Cargo.toml` - -Caused by: - no targets specified in the manifest - either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n", - ) - .run(); -} - -#[cargo_test] -fn lto_build() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [profile.release] - lto = true - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - p.cargo("build -v --release") - .with_stderr( - "\ -[COMPILING] test v0.0.0 ([CWD]) -[RUNNING] `rustc --crate-name test src/main.rs [..]--crate-type bin \ - --emit=[..]link \ - -C opt-level=3 \ - -C lto \ - [..] -[FINISHED] release [optimized] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn verbose_build() { - let p = project().file("src/lib.rs", "").build(); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn verbose_release_build() { - let p = project().file("src/lib.rs", "").build(); - p.cargo("build -v --release") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]\ - -C opt-level=3[..]\ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/release/deps` -[FINISHED] release [optimized] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn verbose_release_build_short() { - let p = project().file("src/lib.rs", "").build(); - p.cargo("build -v -r") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]\ - -C opt-level=3[..]\ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/release/deps` -[FINISHED] release [optimized] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn verbose_release_build_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [dependencies.foo] - path = "foo" - "#, - ) - .file("src/lib.rs", "") - .file( - "foo/Cargo.toml", - r#" - [package] - - name = "foo" - version = "0.0.0" - authors = [] - - [lib] - name = "foo" - crate_type = ["dylib", "rlib"] - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - p.cargo("build -v --release") - .with_stderr(&format!( - "\ -[COMPILING] foo v0.0.0 ([CWD]/foo) -[RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\ - --crate-type dylib --crate-type rlib \ - --emit=[..]link \ - -C prefer-dynamic[..]\ - -C opt-level=3[..]\ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/release/deps` -[COMPILING] test v0.0.0 ([CWD]) -[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]\ - -C opt-level=3[..]\ - -C metadata=[..] \ - --out-dir [..] 
\ - -L dependency=[CWD]/target/release/deps \ - --extern foo=[CWD]/target/release/deps/{prefix}foo{suffix} \ - --extern foo=[CWD]/target/release/deps/libfoo.rlib` -[FINISHED] release [optimized] target(s) in [..] -", - prefix = env::consts::DLL_PREFIX, - suffix = env::consts::DLL_SUFFIX - )) - .run(); -} - -#[cargo_test] -fn explicit_examples() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - authors = [] - - [lib] - name = "foo" - path = "src/lib.rs" - - [[example]] - name = "hello" - path = "examples/ex-hello.rs" - - [[example]] - name = "goodbye" - path = "examples/ex-goodbye.rs" - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn get_hello() -> &'static str { "Hello" } - pub fn get_goodbye() -> &'static str { "Goodbye" } - pub fn get_world() -> &'static str { "World" } - "#, - ) - .file( - "examples/ex-hello.rs", - r#" - extern crate foo; - fn main() { println!("{}, {}!", foo::get_hello(), foo::get_world()); } - "#, - ) - .file( - "examples/ex-goodbye.rs", - r#" - extern crate foo; - fn main() { println!("{}, {}!", foo::get_goodbye(), foo::get_world()); } - "#, - ) - .build(); - - p.cargo("build --examples").run(); - p.process(&p.bin("examples/hello")) - .with_stdout("Hello, World!\n") - .run(); - p.process(&p.bin("examples/goodbye")) - .with_stdout("Goodbye, World!\n") - .run(); -} - -#[cargo_test] -fn non_existing_test() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [lib] - name = "foo" - path = "src/lib.rs" - - [[test]] - name = "hello" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build --tests -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - can't find `hello` test at `tests/hello.rs` or `tests/hello/main.rs`. \ - Please specify test.path if you want to use a non-default path.", - ) - .run(); -} - -#[cargo_test] -fn non_existing_example() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [lib] - name = "foo" - path = "src/lib.rs" - - [[example]] - name = "hello" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build --examples -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - can't find `hello` example at `examples/hello.rs` or `examples/hello/main.rs`. \ - Please specify example.path if you want to use a non-default path.", - ) - .run(); -} - -#[cargo_test] -fn non_existing_benchmark() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [lib] - name = "foo" - path = "src/lib.rs" - - [[bench]] - name = "hello" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build --benches -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - can't find `hello` bench at `benches/hello.rs` or `benches/hello/main.rs`. \ - Please specify bench.path if you want to use a non-default path.", - ) - .run(); -} - -#[cargo_test] -fn non_existing_binary() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/lib.rs", "") - .file("src/bin/ehlo.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - can't find `foo` bin at `src/bin/foo.rs` or `src/bin/foo/main.rs`. 
\ - Please specify bin.path if you want to use a non-default path.", - ) - .run(); -} - -#[cargo_test] -fn commonly_wrong_path_of_test() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [lib] - name = "foo" - path = "src/lib.rs" - - [[test]] - name = "foo" - "#, - ) - .file("src/lib.rs", "") - .file("test/foo.rs", "") - .build(); - - p.cargo("build --tests -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - can't find `foo` test at default paths, but found a file at `test/foo.rs`. - Perhaps rename the file to `tests/foo.rs` for target auto-discovery, \ - or specify test.path if you want to use a non-default path.", - ) - .run(); -} - -#[cargo_test] -fn commonly_wrong_path_of_example() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [lib] - name = "foo" - path = "src/lib.rs" - - [[example]] - name = "foo" - "#, - ) - .file("src/lib.rs", "") - .file("example/foo.rs", "") - .build(); - - p.cargo("build --examples -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - can't find `foo` example at default paths, but found a file at `example/foo.rs`. - Perhaps rename the file to `examples/foo.rs` for target auto-discovery, \ - or specify example.path if you want to use a non-default path.", - ) - .run(); -} - -#[cargo_test] -fn commonly_wrong_path_of_benchmark() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [lib] - name = "foo" - path = "src/lib.rs" - - [[bench]] - name = "foo" - "#, - ) - .file("src/lib.rs", "") - .file("bench/foo.rs", "") - .build(); - - p.cargo("build --benches -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - can't find `foo` bench at default paths, but found a file at `bench/foo.rs`. - Perhaps rename the file to `benches/foo.rs` for target auto-discovery, \ - or specify bench.path if you want to use a non-default path.", - ) - .run(); -} - -#[cargo_test] -fn commonly_wrong_path_binary() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/lib.rs", "") - .file("src/bins/foo.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - can't find `foo` bin at default paths, but found a file at `src/bins/foo.rs`. - Perhaps rename the file to `src/bin/foo.rs` for target auto-discovery, \ - or specify bin.path if you want to use a non-default path.", - ) - .run(); -} - -#[cargo_test] -fn commonly_wrong_path_subdir_binary() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/lib.rs", "") - .file("src/bins/foo/main.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - can't find `foo` bin at default paths, but found a file at `src/bins/foo/main.rs`. 
- Perhaps rename the file to `src/bin/foo/main.rs` for target auto-discovery, \ - or specify bin.path if you want to use a non-default path.", - ) - .run(); -} - -#[cargo_test] -fn found_multiple_target_files() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/lib.rs", "") - .file("src/bin/foo.rs", "") - .file("src/bin/foo/main.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - // Don't assert the inferred pathes since the order is non-deterministic. - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - cannot infer path for `foo` bin - Cargo doesn't know which to use because multiple target files found \ - at `src/bin/foo[..].rs` and `src/bin/foo[..].rs`.", - ) - .run(); -} - -#[cargo_test] -fn legacy_binary_paths_warnings() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - authors = [] - - [[bin]] - name = "bar" - "#, - ) - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v") - .with_stderr_contains( - "\ -[WARNING] path `[..]src/main.rs` was erroneously implicitly accepted for binary `bar`, -please set bin.path in Cargo.toml", - ) - .run(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - authors = [] - - [[bin]] - name = "bar" - "#, - ) - .file("src/lib.rs", "") - .file("src/bin/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v") - .with_stderr_contains( - "\ -[WARNING] path `[..]src/bin/main.rs` was erroneously implicitly accepted for binary `bar`, -please set bin.path in Cargo.toml", - ) - .run(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - authors = [] - - [[bin]] - name = "bar" - "#, - ) - .file("src/bar.rs", "fn main() {}") - .build(); - - p.cargo("build -v") - .with_stderr_contains( - "\ -[WARNING] path `[..]src/bar.rs` was erroneously implicitly accepted for binary `bar`, -please set bin.path in Cargo.toml", - ) - .run(); -} - -#[cargo_test] -fn implicit_examples() { - let p = project() - .file( - "src/lib.rs", - r#" - pub fn get_hello() -> &'static str { "Hello" } - pub fn get_goodbye() -> &'static str { "Goodbye" } - pub fn get_world() -> &'static str { "World" } - "#, - ) - .file( - "examples/hello.rs", - r#" - extern crate foo; - fn main() { - println!("{}, {}!", foo::get_hello(), foo::get_world()); - } - "#, - ) - .file( - "examples/goodbye.rs", - r#" - extern crate foo; - fn main() { - println!("{}, {}!", foo::get_goodbye(), foo::get_world()); - } - "#, - ) - .build(); - - p.cargo("build --examples").run(); - p.process(&p.bin("examples/hello")) - .with_stdout("Hello, World!\n") - .run(); - p.process(&p.bin("examples/goodbye")) - .with_stdout("Goodbye, World!\n") - .run(); -} - -#[cargo_test] -fn standard_build_no_ndebug() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/foo.rs", - r#" - fn main() { - if cfg!(debug_assertions) { - println!("slow") - } else { - println!("fast") - } - } - "#, - ) - .build(); - - p.cargo("build").run(); - p.process(&p.bin("foo")).with_stdout("slow\n").run(); -} - -#[cargo_test] -fn release_build_ndebug() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/foo.rs", - r#" - fn main() { - if cfg!(debug_assertions) { - println!("slow") - } else { - println!("fast") - } - } - "#, - ) - .build(); - - p.cargo("build --release").run(); - 
p.process(&p.release_bin("foo")).with_stdout("fast\n").run(); -} - -#[cargo_test] -fn inferred_main_bin() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - p.cargo("build").run(); - p.process(&p.bin("foo")).run(); -} - -#[cargo_test] -fn deletion_causes_failure() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1")); - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..]can't find crate for `bar`") - .run(); -} - -#[cargo_test] -fn bad_cargo_toml_in_target_dir() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("target/Cargo.toml", "bad-toml") - .build(); - - p.cargo("build").run(); - p.process(&p.bin("foo")).run(); -} - -#[cargo_test] -fn lib_with_standard_name() { - let p = project() - .file("Cargo.toml", &basic_manifest("syntax", "0.0.1")) - .file("src/lib.rs", "pub fn foo() {}") - .file( - "src/main.rs", - "extern crate syntax; fn main() { syntax::foo() }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] syntax v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn simple_staticlib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [lib] - name = "foo" - crate-type = ["staticlib"] - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .build(); - - // env var is a test for #1381 - p.cargo("build").env("CARGO_LOG", "nekoneko=trace").run(); -} - -#[cargo_test] -fn staticlib_rlib_and_bin() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [lib] - name = "foo" - crate-type = ["staticlib", "rlib"] - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file("src/main.rs", "extern crate foo; fn main() { foo::foo(); }") - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn opt_out_of_bin() { - let p = project() - .file( - "Cargo.toml", - r#" - bin = [] - - [package] - name = "foo" - authors = [] - version = "0.0.1" - "#, - ) - .file("src/lib.rs", "") - .file("src/main.rs", "bad syntax") - .build(); - p.cargo("build").run(); -} - -#[cargo_test] -fn single_lib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [lib] - name = "foo" - path = "src/bar.rs" - "#, - ) - .file("src/bar.rs", "") - .build(); - p.cargo("build").run(); -} - -#[cargo_test] -fn freshness_ignores_excluded() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - exclude = ["src/b*.rs"] - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - .build(); - foo.root().move_into_the_past(); - - foo.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - - // Smoke test to make sure it doesn't compile again - println!("first pass"); - foo.cargo("build").with_stdout("").run(); - - // Modify an ignored file and make sure we don't rebuild - println!("second pass"); - foo.change_file("src/bar.rs", ""); - foo.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn rebuild_preserves_out_dir() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = 'build.rs' - "#, - ) - .file( - "build.rs", - r#" - use std::env; - use std::fs::File; - use std::path::Path; - - fn main() { - let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo"); - if env::var_os("FIRST").is_some() { - File::create(&path).unwrap(); - } else { - File::create(&path).unwrap(); - } - } - "#, - ) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - .build(); - foo.root().move_into_the_past(); - - foo.cargo("build") - .env("FIRST", "1") - .with_stderr( - "\ -[COMPILING] foo v0.0.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - foo.change_file("src/bar.rs", ""); - foo.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn dep_no_libs() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.0")) - .file("bar/src/main.rs", "") - .build(); - foo.cargo("build").run(); -} - -#[cargo_test] -fn recompile_space_in_name() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [lib] - name = "foo" - path = "src/my lib.rs" - "#, - ) - .file("src/my lib.rs", "") - .build(); - foo.cargo("build").run(); - foo.root().move_into_the_past(); - foo.cargo("build").with_stdout("").run(); -} - -#[cfg(unix)] -#[cargo_test] -fn credentials_is_unreadable() { - use cargo_test_support::paths::home; - use std::os::unix::prelude::*; - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/lib.rs", "") - .build(); - - let credentials = home().join(".cargo/credentials"); - t!(fs::create_dir_all(credentials.parent().unwrap())); - t!(fs::write( - &credentials, - r#" - [registry] - token = "api-token" - "# - )); - let stat = fs::metadata(credentials.as_path()).unwrap(); - let mut perms = stat.permissions(); - perms.set_mode(0o000); - fs::set_permissions(credentials, perms).unwrap(); - - p.cargo("build").run(); -} - -#[cfg(unix)] -#[cargo_test] -fn ignore_bad_directories() { - use std::os::unix::prelude::*; - let foo = project() - .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) - .file("src/lib.rs", "") - .build(); - let dir = foo.root().join("tmp"); - fs::create_dir(&dir).unwrap(); - let stat = fs::metadata(&dir).unwrap(); - let mut perms = stat.permissions(); - perms.set_mode(0o644); - fs::set_permissions(&dir, perms.clone()).unwrap(); - foo.cargo("build").run(); - perms.set_mode(0o755); - fs::set_permissions(&dir, perms).unwrap(); -} - -#[cargo_test] -fn bad_cargo_config() { - let foo = project() - .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) - .file("src/lib.rs", "") - .file(".cargo/config", "this is not valid toml") - .build(); - foo.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] could not load Cargo configuration - 
-Caused by: - could not parse TOML configuration in `[..]` - -Caused by: - could not parse input as TOML - -Caused by: - expected an equals, found an identifier at line 1 column 6 -", - ) - .run(); -} - -#[cargo_test] -fn cargo_platform_specific_dependency() { - let host = rustc_host(); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - - [target.{host}.dependencies] - dep = {{ path = "dep" }} - [target.{host}.build-dependencies] - build = {{ path = "build" }} - [target.{host}.dev-dependencies] - dev = {{ path = "dev" }} - "#, - host = host - ), - ) - .file("src/main.rs", "extern crate dep; fn main() { dep::dep() }") - .file( - "tests/foo.rs", - "extern crate dev; #[test] fn foo() { dev::dev() }", - ) - .file( - "build.rs", - "extern crate build; fn main() { build::build(); }", - ) - .file("dep/Cargo.toml", &basic_manifest("dep", "0.5.0")) - .file("dep/src/lib.rs", "pub fn dep() {}") - .file("build/Cargo.toml", &basic_manifest("build", "0.5.0")) - .file("build/src/lib.rs", "pub fn build() {}") - .file("dev/Cargo.toml", &basic_manifest("dev", "0.5.0")) - .file("dev/src/lib.rs", "pub fn dev() {}") - .build(); - - p.cargo("build").run(); - - assert!(p.bin("foo").is_file()); - p.cargo("test").run(); -} - -#[cargo_test] -fn bad_platform_specific_dependency() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [target.wrong-target.dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file( - "bar/src/lib.rs", - r#"pub fn gimme() -> String { format!("") }"#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..]can't find crate for `bar`") - .run(); -} - -#[cargo_test] -fn cargo_platform_specific_dependency_wrong_platform() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [target.non-existing-triplet.dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file( - "bar/src/lib.rs", - "invalid rust file, should not be compiled", - ) - .build(); - - p.cargo("build").run(); - - assert!(p.bin("foo").is_file()); - p.process(&p.bin("foo")).run(); - - let lockfile = p.read_lockfile(); - assert!(lockfile.contains("bar")); -} - -#[cargo_test] -fn example_as_lib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex" - crate-type = ["lib"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/ex.rs", "") - .build(); - - p.cargo("build --example=ex").run(); - assert!(p.example_lib("ex", "lib").is_file()); -} - -#[cargo_test] -fn example_as_rlib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex" - crate-type = ["rlib"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/ex.rs", "") - .build(); - - p.cargo("build --example=ex").run(); - assert!(p.example_lib("ex", "rlib").is_file()); -} - -#[cargo_test] -fn example_as_dylib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex" - crate-type = ["dylib"] - 
"#, - ) - .file("src/lib.rs", "") - .file("examples/ex.rs", "") - .build(); - - p.cargo("build --example=ex").run(); - assert!(p.example_lib("ex", "dylib").is_file()); -} - -#[cargo_test] -fn example_as_proc_macro() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex" - crate-type = ["proc-macro"] - "#, - ) - .file("src/lib.rs", "") - .file( - "examples/ex.rs", - r#" - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro] - pub fn eat(_item: TokenStream) -> TokenStream { - "".parse().unwrap() - } - "#, - ) - .build(); - - p.cargo("build --example=ex").run(); - assert!(p.example_lib("ex", "proc-macro").is_file()); -} - -#[cargo_test] -fn example_bin_same_name() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("examples/foo.rs", "fn main() {}") - .build(); - - p.cargo("build --examples").run(); - - assert!(!p.bin("foo").is_file()); - // We expect a file of the form bin/foo-{metadata_hash} - assert!(p.bin("examples/foo").is_file()); - - p.cargo("build --examples").run(); - - assert!(!p.bin("foo").is_file()); - // We expect a file of the form bin/foo-{metadata_hash} - assert!(p.bin("examples/foo").is_file()); -} - -#[cargo_test] -fn compile_then_delete() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - p.cargo("run -v").run(); - assert!(p.bin("foo").is_file()); - if cfg!(windows) { - // On windows unlinking immediately after running often fails, so sleep - sleep_ms(100); - } - fs::remove_file(&p.bin("foo")).unwrap(); - p.cargo("run -v").run(); -} - -#[cargo_test] -fn transitive_dependencies_not_available() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.aaaaa] - path = "a" - "#, - ) - .file( - "src/main.rs", - "extern crate bbbbb; extern crate aaaaa; fn main() {}", - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "aaaaa" - version = "0.0.1" - authors = [] - - [dependencies.bbbbb] - path = "../b" - "#, - ) - .file("a/src/lib.rs", "extern crate bbbbb;") - .file("b/Cargo.toml", &basic_manifest("bbbbb", "0.0.1")) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr_contains("[..] can't find crate for `bbbbb`[..]") - .run(); -} - -#[cargo_test] -fn cyclic_deps_rejected() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies.foo] - path = ".." - "#, - ) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( -"[ERROR] cyclic package dependency: package `a v0.0.1 ([CWD]/a)` depends on itself. Cycle: -package `a v0.0.1 ([CWD]/a)` - ... which satisfies path dependency `a` of package `foo v0.0.1 ([CWD])` - ... 
which satisfies path dependency `foo` of package `a v0.0.1 ([..])`", - ).run(); -} - -#[cargo_test] -fn predictable_filenames() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate-type = ["dylib", "rlib"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v").run(); - assert!(p.root().join("target/debug/libfoo.rlib").is_file()); - let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); - assert!(p.root().join("target/debug").join(dylib_name).is_file()); -} - -#[cargo_test] -fn dashes_to_underscores() { - let p = project() - .file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1")) - .file("src/lib.rs", "") - .file("src/main.rs", "extern crate foo_bar; fn main() {}") - .build(); - - p.cargo("build -v").run(); - assert!(p.bin("foo-bar").is_file()); -} - -#[cargo_test] -fn dashes_in_crate_name_bad() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo-bar" - "#, - ) - .file("src/lib.rs", "") - .file("src/main.rs", "extern crate foo_bar; fn main() {}") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` - -Caused by: - library target names cannot contain hyphens: foo-bar -", - ) - .run(); -} - -#[cargo_test] -fn rustc_env_var() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("build -v") - .env("RUSTC", "rustc-that-does-not-exist") - .with_status(101) - .with_stderr( - "\ -[ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..]) - -Caused by: -[..] -", - ) - .run(); - assert!(!p.bin("a").is_file()); -} - -#[cargo_test] -fn filtering() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("src/bin/b.rs", "fn main() {}") - .file("examples/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .build(); - - p.cargo("build --lib").run(); - assert!(!p.bin("a").is_file()); - - p.cargo("build --bin=a --example=a").run(); - assert!(p.bin("a").is_file()); - assert!(!p.bin("b").is_file()); - assert!(p.bin("examples/a").is_file()); - assert!(!p.bin("examples/b").is_file()); -} - -#[cargo_test] -fn filtering_implicit_bins() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("src/bin/b.rs", "fn main() {}") - .file("examples/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .build(); - - p.cargo("build --bins").run(); - assert!(p.bin("a").is_file()); - assert!(p.bin("b").is_file()); - assert!(!p.bin("examples/a").is_file()); - assert!(!p.bin("examples/b").is_file()); -} - -#[cargo_test] -fn filtering_implicit_examples() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("src/bin/b.rs", "fn main() {}") - .file("examples/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .build(); - - p.cargo("build --examples").run(); - assert!(!p.bin("a").is_file()); - assert!(!p.bin("b").is_file()); - assert!(p.bin("examples/a").is_file()); - assert!(p.bin("examples/b").is_file()); -} - -#[cargo_test] -fn ignore_dotfile() { - let p = project() - .file("src/bin/.a.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn ignore_dotdirs() { - let p = project() - .file("src/bin/a.rs", "fn main() {}") - .file(".git/Cargo.toml", "") - 
.file(".pc/dummy-fix.patch/Cargo.toml", "") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn dotdir_root() { - let p = ProjectBuilder::new(root().join(".foo")) - .file("src/bin/a.rs", "fn main() {}") - .build(); - p.cargo("build").run(); -} - -#[cargo_test] -fn custom_target_dir_env() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - let exe_name = format!("foo{}", env::consts::EXE_SUFFIX); - - p.cargo("build").env("CARGO_TARGET_DIR", "foo/target").run(); - assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); - assert!(!p.root().join("target/debug").join(&exe_name).is_file()); - - p.cargo("build").run(); - assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); - assert!(p.root().join("target/debug").join(&exe_name).is_file()); - - p.cargo("build") - .env("CARGO_BUILD_TARGET_DIR", "foo2/target") - .run(); - assert!(p.root().join("foo2/target/debug").join(&exe_name).is_file()); - - p.change_file( - ".cargo/config", - r#" - [build] - target-dir = "foo/target" - "#, - ); - p.cargo("build").env("CARGO_TARGET_DIR", "bar/target").run(); - assert!(p.root().join("bar/target/debug").join(&exe_name).is_file()); - assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); - assert!(p.root().join("target/debug").join(&exe_name).is_file()); -} - -#[cargo_test] -fn custom_target_dir_line_parameter() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - let exe_name = format!("foo{}", env::consts::EXE_SUFFIX); - - p.cargo("build --target-dir foo/target").run(); - assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); - assert!(!p.root().join("target/debug").join(&exe_name).is_file()); - - p.cargo("build").run(); - assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); - assert!(p.root().join("target/debug").join(&exe_name).is_file()); - - p.change_file( - ".cargo/config", - r#" - [build] - target-dir = "foo/target" - "#, - ); - p.cargo("build --target-dir bar/target").run(); - assert!(p.root().join("bar/target/debug").join(&exe_name).is_file()); - assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); - assert!(p.root().join("target/debug").join(&exe_name).is_file()); - - p.cargo("build --target-dir foobar/target") - .env("CARGO_TARGET_DIR", "bar/target") - .run(); - assert!(p - .root() - .join("foobar/target/debug") - .join(&exe_name) - .is_file()); - assert!(p.root().join("bar/target/debug").join(&exe_name).is_file()); - assert!(p.root().join("foo/target/debug").join(&exe_name).is_file()); - assert!(p.root().join("target/debug").join(&exe_name).is_file()); -} - -#[cargo_test] -fn build_multiple_packages() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.d1] - path = "d1" - [dependencies.d2] - path = "d2" - - [[bin]] - name = "foo" - "#, - ) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .file("d1/Cargo.toml", &basic_bin_manifest("d1")) - .file("d1/src/lib.rs", "") - .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") - .file( - "d2/Cargo.toml", - r#" - [package] - name = "d2" - version = "0.0.1" - authors = [] - - [[bin]] - name = "d2" - doctest = false - "#, - ) - .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }") - .build(); - - p.cargo("build -p d1 -p d2 -p foo").run(); - - assert!(p.bin("foo").is_file()); - p.process(&p.bin("foo")).with_stdout("i am foo\n").run(); - - let d1_path = &p - .build_dir() - .join("debug") - .join(format!("d1{}", 
env::consts::EXE_SUFFIX)); - let d2_path = &p - .build_dir() - .join("debug") - .join(format!("d2{}", env::consts::EXE_SUFFIX)); - - assert!(d1_path.is_file()); - p.process(d1_path).with_stdout("d1").run(); - - assert!(d2_path.is_file()); - p.process(d2_path).with_stdout("d2").run(); -} - -#[cargo_test] -fn invalid_spec() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.d1] - path = "d1" - - [[bin]] - name = "foo" - "#, - ) - .file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[])) - .file("d1/Cargo.toml", &basic_bin_manifest("d1")) - .file("d1/src/lib.rs", "") - .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }") - .build(); - - p.cargo("build -p notAValidDep") - .with_status(101) - .with_stderr("[ERROR] package ID specification `notAValidDep` did not match any packages") - .run(); - - p.cargo("build -p d1 -p notAValidDep") - .with_status(101) - .with_stderr("[ERROR] package ID specification `notAValidDep` did not match any packages") - .run(); -} - -#[cargo_test] -fn manifest_with_bom_is_ok() { - let p = project() - .file( - "Cargo.toml", - "\u{FEFF} - [package] - name = \"foo\" - version = \"0.0.1\" - authors = [] - ", - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build -v").run(); -} - -#[cargo_test] -fn panic_abort_compiles_with_panic_abort() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.dev] - panic = 'abort' - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build -v") - .with_stderr_contains("[..] -C panic=abort [..]") - .run(); -} - -#[cargo_test] -fn compiler_json_error_format() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - "#, - ) - .file( - "build.rs", - "fn main() { println!(\"cargo:rustc-cfg=xyz\") }", - ) - .file("src/main.rs", "fn main() { let unused = 92; }") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("bar/src/lib.rs", r#"fn dead() {}"#) - .build(); - - let output = |fresh| { - r#" - { - "reason":"compiler-artifact", - "package_id":"foo 0.5.0 ([..])", - "manifest_path": "[..]", - "target":{ - "kind":["custom-build"], - "crate_types":["bin"], - "doc": false, - "doctest": false, - "edition": "2015", - "name":"build-script-build", - "src_path":"[..]build.rs", - "test": false - }, - "profile": { - "debug_assertions": true, - "debuginfo": 2, - "opt_level": "0", - "overflow_checks": true, - "test": false - }, - "executable": null, - "features": [], - "filenames": "{...}", - "fresh": $FRESH - } - - { - "reason":"compiler-message", - "package_id":"bar 0.5.0 ([..])", - "manifest_path": "[..]", - "target":{ - "kind":["lib"], - "crate_types":["lib"], - "doc": true, - "doctest": true, - "edition": "2015", - "name":"bar", - "src_path":"[..]lib.rs", - "test": true - }, - "message":"{...}" - } - - { - "reason":"compiler-artifact", - "profile": { - "debug_assertions": true, - "debuginfo": 2, - "opt_level": "0", - "overflow_checks": true, - "test": false - }, - "executable": null, - "features": [], - "package_id":"bar 0.5.0 ([..])", - "manifest_path": "[..]", - "target":{ - "kind":["lib"], - "crate_types":["lib"], - "doc": true, - "doctest": true, - "edition": "2015", - "name":"bar", - "src_path":"[..]lib.rs", - "test": true - }, - "filenames":[ - "[..].rlib", - "[..].rmeta" - ], - "fresh": $FRESH - } - - { - "reason":"build-script-executed", - 
"package_id":"foo 0.5.0 ([..])", - "linked_libs":[], - "linked_paths":[], - "env":[], - "cfgs":["xyz"], - "out_dir": "[..]target/debug/build/foo-[..]/out" - } - - { - "reason":"compiler-message", - "package_id":"foo 0.5.0 ([..])", - "manifest_path": "[..]", - "target":{ - "kind":["bin"], - "crate_types":["bin"], - "doc": true, - "doctest": false, - "edition": "2015", - "name":"foo", - "src_path":"[..]main.rs", - "test": true - }, - "message":"{...}" - } - - { - "reason":"compiler-artifact", - "package_id":"foo 0.5.0 ([..])", - "manifest_path": "[..]", - "target":{ - "kind":["bin"], - "crate_types":["bin"], - "doc": true, - "doctest": false, - "edition": "2015", - "name":"foo", - "src_path":"[..]main.rs", - "test": true - }, - "profile": { - "debug_assertions": true, - "debuginfo": 2, - "opt_level": "0", - "overflow_checks": true, - "test": false - }, - "executable": "[..]/foo/target/debug/foo[EXE]", - "features": [], - "filenames": "{...}", - "fresh": $FRESH - } - - {"reason": "build-finished", "success": true} - "# - .replace("$FRESH", fresh) - }; - - // Use `jobs=1` to ensure that the order of messages is consistent. - p.cargo("build -v --message-format=json --jobs=1") - .with_json_contains_unordered(&output("false")) - .run(); - - // With fresh build, we should repeat the artifacts, - // and replay the cached compiler warnings. - p.cargo("build -v --message-format=json --jobs=1") - .with_json_contains_unordered(&output("true")) - .run(); -} - -#[cargo_test] -fn wrong_message_format_option() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --message-format XML") - .with_status(101) - .with_stderr_contains( - "\ -error: invalid message format specifier: `xml` -", - ) - .run(); -} - -#[cargo_test] -fn message_format_json_forward_stderr() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() { let unused = 0; }") - .build(); - - p.cargo("rustc --release --bin foo --message-format JSON") - .with_json_contains_unordered( - r#" - { - "reason":"compiler-message", - "package_id":"foo 0.5.0 ([..])", - "manifest_path": "[..]", - "target":{ - "kind":["bin"], - "crate_types":["bin"], - "doc": true, - "doctest": false, - "edition": "2015", - "name":"foo", - "src_path":"[..]", - "test": true - }, - "message":"{...}" - } - - { - "reason":"compiler-artifact", - "package_id":"foo 0.5.0 ([..])", - "manifest_path": "[..]", - "target":{ - "kind":["bin"], - "crate_types":["bin"], - "doc": true, - "doctest": false, - "edition": "2015", - "name":"foo", - "src_path":"[..]", - "test": true - }, - "profile":{ - "debug_assertions":false, - "debuginfo":null, - "opt_level":"3", - "overflow_checks": false, - "test":false - }, - "executable": "{...}", - "features":[], - "filenames": "{...}", - "fresh": false - } - - {"reason": "build-finished", "success": true} - "#, - ) - .run(); -} - -#[cargo_test] -fn no_warn_about_package_metadata() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [package.metadata] - foo = "bar" - a = true - b = 3 - - [package.metadata.another] - bar = 3 - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build") - .with_stderr( - "[..] 
foo v0.0.1 ([..])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - ) - .run(); -} - -#[cargo_test] -fn no_warn_about_workspace_metadata() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo"] - - [workspace.metadata] - something = "something_else" - x = 1 - y = 2 - - [workspace.metadata.another] - bar = 12 - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "[..] foo v0.0.1 ([..])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - ) - .run(); -} - -#[cargo_test] -fn cargo_build_empty_target() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --target") - .arg("") - .with_status(101) - .with_stderr_contains("[..] target was empty") - .run(); -} - -#[cargo_test] -fn build_all_workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = "bar" } - - [workspace] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build --workspace") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 ([..]) -[COMPILING] foo v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_all_exclude() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") - .build(); - - p.cargo("build --workspace --exclude baz") - .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]") - .with_stderr_unordered( - "\ -[COMPILING] foo v0.1.0 ([..]) -[COMPILING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_all_exclude_not_found() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build --workspace --exclude baz") - .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]") - .with_stderr_unordered( - "\ -[WARNING] excluded package(s) `baz` not found in workspace [..] -[COMPILING] foo v0.1.0 ([..]) -[COMPILING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn build_all_exclude_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") - .build(); - - p.cargo("build --workspace --exclude '*z'") - .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]") - .with_stderr_unordered( - "\ -[COMPILING] foo v0.1.0 ([..]) -[COMPILING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_all_exclude_glob_not_found() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build --workspace --exclude '*z'") - .with_stderr_does_not_contain("[COMPILING] baz v0.1.0 [..]") - .with_stderr( - "\ -[WARNING] excluded package pattern(s) `*z` not found in workspace [..] -[COMPILING] [..] v0.1.0 ([..]) -[COMPILING] [..] v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_all_exclude_broken_glob() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - p.cargo("build --workspace --exclude '[*z'") - .with_status(101) - .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`") - .run(); -} - -#[cargo_test] -fn build_all_workspace_implicit_examples() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = "bar" } - - [workspace] - "#, - ) - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("src/bin/b.rs", "fn main() {}") - .file("examples/c.rs", "fn main() {}") - .file("examples/d.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .file("bar/src/bin/e.rs", "fn main() {}") - .file("bar/src/bin/f.rs", "fn main() {}") - .file("bar/examples/g.rs", "fn main() {}") - .file("bar/examples/h.rs", "fn main() {}") - .build(); - - p.cargo("build --workspace --examples") - .with_stderr( - "[..] Compiling bar v0.1.0 ([..])\n\ - [..] Compiling foo v0.1.0 ([..])\n\ - [..] 
Finished dev [unoptimized + debuginfo] target(s) in [..]\n", - ) - .run(); - assert!(!p.bin("a").is_file()); - assert!(!p.bin("b").is_file()); - assert!(p.bin("examples/c").is_file()); - assert!(p.bin("examples/d").is_file()); - assert!(!p.bin("e").is_file()); - assert!(!p.bin("f").is_file()); - assert!(p.bin("examples/g").is_file()); - assert!(p.bin("examples/h").is_file()); -} - -#[cargo_test] -fn build_all_virtual_manifest() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - // The order in which bar and baz are built is not guaranteed - p.cargo("build --workspace") - .with_stderr_unordered( - "\ -[COMPILING] baz v0.1.0 ([..]) -[COMPILING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_virtual_manifest_all_implied() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - // The order in which `bar` and `baz` are built is not guaranteed. - p.cargo("build") - .with_stderr_unordered( - "\ -[COMPILING] baz v0.1.0 ([..]) -[COMPILING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_virtual_manifest_one_project() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") - .build(); - - p.cargo("build -p bar") - .with_stderr_does_not_contain("[..]baz[..]") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_virtual_manifest_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("build -p '*z'") - .with_stderr_does_not_contain("[..]bar[..]") - .with_stderr( - "\ -[COMPILING] baz v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn build_virtual_manifest_glob_not_found() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build -p bar -p '*z'") - .with_status(101) - .with_stderr("[ERROR] package pattern(s) `*z` not found in workspace [..]") - .run(); -} - -#[cargo_test] -fn build_virtual_manifest_broken_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build -p '[*z'") - .with_status(101) - .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`") - .run(); -} - -#[cargo_test] -fn build_all_virtual_manifest_implicit_examples() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .file("bar/src/bin/a.rs", "fn main() {}") - .file("bar/src/bin/b.rs", "fn main() {}") - .file("bar/examples/c.rs", "fn main() {}") - .file("bar/examples/d.rs", "fn main() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "") - .file("baz/src/bin/e.rs", "fn main() {}") - .file("baz/src/bin/f.rs", "fn main() {}") - .file("baz/examples/g.rs", "fn main() {}") - .file("baz/examples/h.rs", "fn main() {}") - .build(); - - // The order in which bar and baz are built is not guaranteed - p.cargo("build --workspace --examples") - .with_stderr_unordered( - "\ -[COMPILING] baz v0.1.0 ([..]) -[COMPILING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - assert!(!p.bin("a").is_file()); - assert!(!p.bin("b").is_file()); - assert!(p.bin("examples/c").is_file()); - assert!(p.bin("examples/d").is_file()); - assert!(!p.bin("e").is_file()); - assert!(!p.bin("f").is_file()); - assert!(p.bin("examples/g").is_file()); - assert!(p.bin("examples/h").is_file()); -} - -#[cargo_test] -fn build_all_member_dependency_same_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.1.0" - - [dependencies] - a = "0.1.0" - "#, - ) - .file("a/src/lib.rs", "pub fn a() {}") - .build(); - - Package::new("a", "0.1.0").publish(); - - p.cargo("build --workspace") - .with_stderr( - "[UPDATING] `[..]` index\n\ - [DOWNLOADING] crates ...\n\ - [DOWNLOADED] a v0.1.0 ([..])\n\ - [COMPILING] a v0.1.0\n\ - [COMPILING] a v0.1.0 ([..])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - ) - .run(); -} - -#[cargo_test] -fn run_proper_binary() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - [[bin]] - name = "main" - [[bin]] - name = "other" - "#, - ) - .file("src/lib.rs", "") - .file( - "src/bin/main.rs", - r#"fn main() { panic!("This should never be run."); }"#, - ) - .file("src/bin/other.rs", "fn main() {}") - .build(); - - p.cargo("run --bin other").run(); -} - -#[cargo_test] -fn run_proper_binary_main_rs() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/lib.rs", "") - .file("src/bin/main.rs", "fn main() {}") - .build(); - - p.cargo("run --bin foo").run(); -} - -#[cargo_test] -fn run_proper_alias_binary_from_src() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - [[bin]] - name = "foo" - [[bin]] - name = "bar" - "#, - ) - .file("src/foo.rs", r#"fn main() { println!("foo"); }"#) - .file("src/bar.rs", r#"fn main() { println!("bar"); }"#) - .build(); - - p.cargo("build --workspace").run(); - p.process(&p.bin("foo")).with_stdout("foo\n").run(); - p.process(&p.bin("bar")).with_stdout("bar\n").run(); -} - -#[cargo_test] -fn run_proper_alias_binary_main_rs() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - [[bin]] - name = "foo" - [[bin]] - name = "bar" - "#, - ) - .file("src/main.rs", r#"fn main() { println!("main"); }"#) - .build(); - - p.cargo("build --workspace").run(); - p.process(&p.bin("foo")).with_stdout("main\n").run(); - p.process(&p.bin("bar")).with_stdout("main\n").run(); -} - -#[cargo_test] -fn run_proper_binary_main_rs_as_foo() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/foo.rs", - r#" fn main() { panic!("This should never be run."); }"#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("run --bin foo").run(); -} - -#[cargo_test] -fn rustc_wrapper() { - let p = project().file("src/lib.rs", "").build(); - let wrapper = tools::echo_wrapper(); - let running = format!( - "[RUNNING] `{} rustc --crate-name foo [..]", - wrapper.display() - ); - p.cargo("build -v") - .env("RUSTC_WRAPPER", &wrapper) - .with_stderr_contains(&running) - .run(); - p.build_dir().rm_rf(); - p.cargo("build -v") - .env("RUSTC_WORKSPACE_WRAPPER", &wrapper) - .with_stderr_contains(&running) - .run(); -} - -#[cargo_test] -fn rustc_wrapper_relative() { - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] 
- name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - let wrapper = tools::echo_wrapper(); - let exe_name = wrapper.file_name().unwrap().to_str().unwrap(); - let relative_path = format!("./{}", exe_name); - fs::hard_link(&wrapper, p.root().join(exe_name)).unwrap(); - let running = format!("[RUNNING] `[ROOT]/foo/./{} rustc[..]", exe_name); - p.cargo("build -v") - .env("RUSTC_WRAPPER", &relative_path) - .with_stderr_contains(&running) - .run(); - p.build_dir().rm_rf(); - p.cargo("build -v") - .env("RUSTC_WORKSPACE_WRAPPER", &relative_path) - .with_stderr_contains(&running) - .run(); - p.build_dir().rm_rf(); - p.change_file( - ".cargo/config.toml", - &format!( - r#" - build.rustc-wrapper = "./{}" - "#, - exe_name - ), - ); - p.cargo("build -v").with_stderr_contains(&running).run(); -} - -#[cargo_test] -fn rustc_wrapper_from_path() { - let p = project().file("src/lib.rs", "").build(); - p.cargo("build -v") - .env("RUSTC_WRAPPER", "wannabe_sccache") - .with_status(101) - .with_stderr_contains("[..]`wannabe_sccache rustc [..]") - .run(); - p.build_dir().rm_rf(); - p.cargo("build -v") - .env("RUSTC_WORKSPACE_WRAPPER", "wannabe_sccache") - .with_status(101) - .with_stderr_contains("[..]`wannabe_sccache rustc [..]") - .run(); -} - -#[cargo_test] -fn cdylib_not_lifted() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - authors = [] - version = "0.1.0" - - [lib] - crate-type = ["cdylib"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - let files = if cfg!(windows) { - if cfg!(target_env = "msvc") { - vec!["foo.dll.lib", "foo.dll.exp", "foo.dll"] - } else { - vec!["libfoo.dll.a", "foo.dll"] - } - } else if cfg!(target_os = "macos") { - vec!["libfoo.dylib"] - } else { - vec!["libfoo.so"] - }; - - for file in files { - println!("checking: {}", file); - assert!(p.root().join("target/debug/deps").join(&file).is_file()); - } -} - -#[cargo_test] -fn cdylib_final_outputs() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo-bar" - authors = [] - version = "0.1.0" - - [lib] - crate-type = ["cdylib"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - let files = if cfg!(windows) { - if cfg!(target_env = "msvc") { - vec!["foo_bar.dll.lib", "foo_bar.dll"] - } else { - vec!["foo_bar.dll", "libfoo_bar.dll.a"] - } - } else if cfg!(target_os = "macos") { - vec!["libfoo_bar.dylib"] - } else { - vec!["libfoo_bar.so"] - }; - - for file in files { - println!("checking: {}", file); - assert!(p.root().join("target/debug").join(&file).is_file()); - } -} - -#[cargo_test] -fn deterministic_cfg_flags() { - // This bug is non-deterministic. - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - build = "build.rs" - - [features] - default = ["f_a", "f_b", "f_c", "f_d"] - f_a = [] - f_b = [] - f_c = [] - f_d = [] - "#, - ) - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-cfg=cfg_a"); - println!("cargo:rustc-cfg=cfg_b"); - println!("cargo:rustc-cfg=cfg_c"); - println!("cargo:rustc-cfg=cfg_d"); - println!("cargo:rustc-cfg=cfg_e"); - } - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.1.0 [..] -[RUNNING] [..] -[RUNNING] [..] -[RUNNING] `rustc --crate-name foo [..] \ ---cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\ ---cfg[..]f_c[..]--cfg[..]f_d[..] 
\ ---cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); -} - -#[cargo_test] -fn explicit_bins_without_paths() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [[bin]] - name = "foo" - - [[bin]] - name = "bar" - "#, - ) - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .file("src/bin/bar.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn no_bin_in_src_with_lib() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/lib.rs", "") - .file("src/foo.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - can't find `foo` bin at `src/bin/foo.rs` or `src/bin/foo/main.rs`. [..]", - ) - .run(); -} - -#[cargo_test] -fn inferred_bins() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/bin/bar.rs", "fn main() {}") - .file("src/bin/baz/main.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - assert!(p.bin("bar").is_file()); - assert!(p.bin("baz").is_file()); -} - -#[cargo_test] -fn inferred_bins_duplicate_name() { - // this should fail, because we have two binaries with the same name - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/bin/bar.rs", "fn main() {}") - .file("src/bin/bar/main.rs", "fn main() {}") - .build(); - - p.cargo("build").with_status(101).with_stderr_contains( - "[..]found duplicate binary name bar, but all binary targets must have a unique name[..]", - ) - .run(); -} - -#[cargo_test] -fn inferred_bin_path() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [[bin]] - name = "bar" - # Note, no `path` key! - "#, - ) - .file("src/bin/bar/main.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); - assert!(p.bin("bar").is_file()); -} - -#[cargo_test] -fn inferred_examples() { - let p = project() - .file("src/lib.rs", "fn main() {}") - .file("examples/bar.rs", "fn main() {}") - .file("examples/baz/main.rs", "fn main() {}") - .build(); - - p.cargo("build --examples").run(); - assert!(p.bin("examples/bar").is_file()); - assert!(p.bin("examples/baz").is_file()); -} - -#[cargo_test] -fn inferred_tests() { - let p = project() - .file("src/lib.rs", "fn main() {}") - .file("tests/bar.rs", "fn main() {}") - .file("tests/baz/main.rs", "fn main() {}") - .build(); - - p.cargo("test --test=bar --test=baz").run(); -} - -#[cargo_test] -fn inferred_benchmarks() { - let p = project() - .file("src/lib.rs", "fn main() {}") - .file("benches/bar.rs", "fn main() {}") - .file("benches/baz/main.rs", "fn main() {}") - .build(); - - p.cargo("bench --bench=bar --bench=baz").run(); -} - -#[cargo_test] -fn target_edition() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [lib] - edition = "2018" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..]--edition=2018 [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn target_edition_override() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - edition = "2018" - - [lib] - edition = "2015" - "#, - ) - .file( - "src/lib.rs", - " - pub fn async() {} - pub fn try() {} - pub fn await() {} - ", - ) - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn same_metadata_different_directory() { - // A top-level crate built in two different workspaces should have the - // same metadata hash. - let p = project() - .at("foo1") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - let output = t!(String::from_utf8( - t!(p.cargo("build -v").exec_with_output()).stderr, - )); - let metadata = output - .split_whitespace() - .find(|arg| arg.starts_with("metadata=")) - .unwrap(); - - let p = project() - .at("foo2") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("build -v") - .with_stderr_contains(format!("[..]{}[..]", metadata)) - .run(); -} - -#[cargo_test] -fn building_a_dependent_crate_witout_bin_should_fail() { - Package::new("testless", "0.1.0") - .file( - "Cargo.toml", - r#" - [project] - name = "testless" - version = "0.1.0" - - [[bin]] - name = "a_bin" - "#, - ) - .file("src/lib.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - testless = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - "[..]can't find `a_bin` bin at `src/bin/a_bin.rs` or `src/bin/a_bin/main.rs`[..]", - ) - .run(); -} - -#[cargo_test] -#[cfg(any(target_os = "macos", target_os = "ios"))] -fn uplift_dsym_of_bin_on_mac() { - use cargo_test_support::paths::is_symlink; - let p = project() - .file("src/main.rs", "fn main() { panic!(); }") - .file("src/bin/b.rs", "fn main() { panic!(); }") - .file("examples/c.rs", "fn main() { panic!(); }") - .file("tests/d.rs", "fn main() { panic!(); }") - .build(); - - p.cargo("build --bins --examples --tests") - .enable_mac_dsym() - .run(); - assert!(p.target_debug_dir().join("foo.dSYM").is_dir()); - assert!(p.target_debug_dir().join("b.dSYM").is_dir()); - assert!(is_symlink(&p.target_debug_dir().join("b.dSYM"))); - assert!(p.target_debug_dir().join("examples/c.dSYM").is_dir()); - assert!(!p.target_debug_dir().join("c.dSYM").exists()); - assert!(!p.target_debug_dir().join("d.dSYM").exists()); -} - -#[cargo_test] -#[cfg(any(target_os = "macos", target_os = "ios"))] -fn uplift_dsym_of_bin_on_mac_when_broken_link_exists() { - use cargo_test_support::paths::is_symlink; - let p = project() - .file("src/main.rs", "fn main() { panic!(); }") - .build(); - let dsym = p.target_debug_dir().join("foo.dSYM"); - - p.cargo("build").enable_mac_dsym().run(); - assert!(dsym.is_dir()); - - // Simulate the situation where the underlying dSYM bundle goes missing - // but the uplifted symlink to it remains. This would previously cause - // builds to permanently fail until the bad symlink was manually removed. 
- dsym.rm_rf(); - p.symlink( - p.target_debug_dir() - .join("deps") - .join("foo-baaaaaadbaaaaaad.dSYM"), - &dsym, - ); - assert!(is_symlink(&dsym)); - assert!(!dsym.exists()); - - p.cargo("build").enable_mac_dsym().run(); - assert!(dsym.is_dir()); -} - -#[cargo_test] -#[cfg(all(target_os = "windows", target_env = "msvc"))] -fn uplift_pdb_of_bin_on_windows() { - let p = project() - .file("src/main.rs", "fn main() { panic!(); }") - .file("src/bin/b.rs", "fn main() { panic!(); }") - .file("src/bin/foo-bar.rs", "fn main() { panic!(); }") - .file("examples/c.rs", "fn main() { panic!(); }") - .file("tests/d.rs", "fn main() { panic!(); }") - .build(); - - p.cargo("build --bins --examples --tests").run(); - assert!(p.target_debug_dir().join("foo.pdb").is_file()); - assert!(p.target_debug_dir().join("b.pdb").is_file()); - assert!(p.target_debug_dir().join("examples/c.pdb").exists()); - assert!(p.target_debug_dir().join("foo-bar.exe").is_file()); - assert!(p.target_debug_dir().join("foo_bar.pdb").is_file()); - assert!(!p.target_debug_dir().join("c.pdb").exists()); - assert!(!p.target_debug_dir().join("d.pdb").exists()); -} - -// Ensure that `cargo build` chooses the correct profile for building -// targets based on filters (assuming `--profile` is not specified). -#[cargo_test] -fn build_filter_infer_profile() { - let p = project() - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .file("tests/t1.rs", "") - .file("benches/b1.rs", "") - .file("examples/ex1.rs", "fn main() {}") - .build(); - - p.cargo("build -v") - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]", - ) - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ - --emit=[..]link[..]", - ) - .run(); - - p.root().join("target").rm_rf(); - p.cargo("build -v --test=t1") - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 [..]", - ) - .with_stderr_contains( - "[RUNNING] `rustc --crate-name t1 tests/t1.rs [..]--emit=[..]link[..]\ - -C debuginfo=2 [..]", - ) - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ - --emit=[..]link[..]-C debuginfo=2 [..]", - ) - .run(); - - p.root().join("target").rm_rf(); - // Bench uses test profile without `--release`. - p.cargo("build -v --bench=b1") - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 [..]", - ) - .with_stderr_contains( - "[RUNNING] `rustc --crate-name b1 benches/b1.rs [..]--emit=[..]link[..]\ - -C debuginfo=2 [..]", - ) - .with_stderr_does_not_contain("opt-level") - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ - --emit=[..]link[..]-C debuginfo=2 [..]", - ) - .run(); -} - -#[cargo_test] -fn targets_selected_default() { - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("build -v") - // Binaries. - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ - --emit=[..]link[..]", - ) - // Benchmarks. - .with_stderr_does_not_contain( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link \ - -C opt-level=3 --test [..]", - ) - // Unit tests. 
- .with_stderr_does_not_contain( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\ - -C debuginfo=2 --test [..]", - ) - .run(); -} - -#[cargo_test] -fn targets_selected_all() { - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("build -v --all-targets") - // Binaries. - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ - --emit=[..]link[..]", - ) - // Unit tests. - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\ - -C debuginfo=2 --test [..]", - ) - .run(); -} - -#[cargo_test] -fn all_targets_no_lib() { - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("build -v --all-targets") - // Binaries. - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ - --emit=[..]link[..]", - ) - // Unit tests. - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\ - -C debuginfo=2 --test [..]", - ) - .run(); -} - -#[cargo_test] -fn no_linkable_target() { - // Issue 3169: this is currently not an error as per discussion in PR #4797. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - [dependencies] - the_lib = { path = "the_lib" } - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "the_lib/Cargo.toml", - r#" - [package] - name = "the_lib" - version = "0.1.0" - [lib] - name = "the_lib" - crate-type = ["staticlib"] - "#, - ) - .file("the_lib/src/lib.rs", "pub fn foo() {}") - .build(); - p.cargo("build") - .with_stderr_contains( - "[WARNING] The package `the_lib` provides no linkable [..] \ - while compiling `foo`. [..] in `the_lib`'s Cargo.toml. [..]", - ) - .run(); -} - -#[cargo_test] -fn avoid_dev_deps() { - Package::new("foo", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dev-dependencies] - baz = "1.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] 
-[ERROR] no matching package named `baz` found -location searched: registry `crates-io` -required by package `bar v0.1.0 ([..]/foo)` -", - ) - .run(); - p.cargo("build -Zavoid-dev-deps") - .masquerade_as_nightly_cargo() - .run(); -} - -#[cargo_test] -fn default_cargo_config_jobs() { - let p = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - jobs = 1 - "#, - ) - .build(); - p.cargo("build -v").run(); -} - -#[cargo_test] -fn good_cargo_config_jobs() { - let p = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - jobs = 4 - "#, - ) - .build(); - p.cargo("build -v").run(); -} - -#[cargo_test] -fn invalid_cargo_config_jobs() { - let p = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - jobs = 0 - "#, - ) - .build(); - p.cargo("build -v") - .with_status(101) - .with_stderr_contains("error: jobs may not be 0") - .run(); -} - -#[cargo_test] -fn invalid_jobs() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("build --jobs -1") - .with_status(1) - .with_stderr_contains( - "error: Found argument '-1' which wasn't expected, or isn't valid in this context", - ) - .run(); - - p.cargo("build --jobs over9000") - .with_status(1) - .with_stderr("error: Invalid value: could not parse `over9000` as a number") - .run(); -} - -#[cargo_test] -fn target_filters_workspace() { - let ws = project() - .at("ws") - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file("a/Cargo.toml", &basic_lib_manifest("a")) - .file("a/src/lib.rs", "") - .file("a/examples/ex1.rs", "fn main() {}") - .file("b/Cargo.toml", &basic_bin_manifest("b")) - .file("b/src/lib.rs", "") - .file("b/src/main.rs", "fn main() {}") - .build(); - - ws.cargo("build -v --example ex") - .with_status(101) - .with_stderr( - "\ -[ERROR] no example target named `ex` - -Did you mean `ex1`?", - ) - .run(); - - ws.cargo("build -v --example 'ex??'") - .with_status(101) - .with_stderr( - "\ -[ERROR] no example target matches pattern `ex??` - -Did you mean `ex1`?", - ) - .run(); - - ws.cargo("build -v --lib") - .with_stderr_contains("[RUNNING] `rustc [..]a/src/lib.rs[..]") - .with_stderr_contains("[RUNNING] `rustc [..]b/src/lib.rs[..]") - .run(); - - ws.cargo("build -v --example ex1") - .with_stderr_contains("[RUNNING] `rustc [..]a/examples/ex1.rs[..]") - .run(); -} - -#[cargo_test] -fn target_filters_workspace_not_found() { - let ws = project() - .at("ws") - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file("a/Cargo.toml", &basic_bin_manifest("a")) - .file("a/src/main.rs", "fn main() {}") - .file("b/Cargo.toml", &basic_bin_manifest("b")) - .file("b/src/main.rs", "fn main() {}") - .build(); - - ws.cargo("build -v --lib") - .with_status(101) - .with_stderr("[ERROR] no library targets found in packages: a, b") - .run(); -} - -#[cfg(unix)] -#[cargo_test] -fn signal_display() { - // Cause the compiler to crash with a signal. 
- let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [dependencies] - pm = { path = "pm" } - "#, - ) - .file( - "src/lib.rs", - r#" - #[macro_use] - extern crate pm; - - #[derive(Foo)] - pub struct S; - "#, - ) - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - [lib] - proc-macro = true - "#, - ) - .file( - "pm/src/lib.rs", - r#" - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro_derive(Foo)] - pub fn derive(_input: TokenStream) -> TokenStream { - std::process::abort() - } - "#, - ) - .build(); - - foo.cargo("build") - .with_stderr( - "\ -[COMPILING] pm [..] -[COMPILING] foo [..] -[ERROR] could not compile `foo` - -Caused by: - process didn't exit successfully: `rustc [..]` (signal: 6, SIGABRT: process abort signal) -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn tricky_pipelining() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "extern crate bar;") - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .build(); - - foo.cargo("build -p bar") - .env("CARGO_BUILD_PIPELINING", "true") - .run(); - foo.cargo("build -p foo") - .env("CARGO_BUILD_PIPELINING", "true") - .run(); -} - -#[cargo_test] -fn pipelining_works() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "extern crate bar;") - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .build(); - - foo.cargo("build") - .env("CARGO_BUILD_PIPELINING", "true") - .with_stdout("") - .with_stderr( - "\ -[COMPILING] [..] -[COMPILING] [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn pipelining_big_graph() { - // Create a crate graph of the form {a,b}{0..29}, where {a,b}(n) depend on {a,b}(n+1) - // Then have `foo`, a binary crate, depend on the whole thing. 
- let mut project = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [dependencies] - a1 = { path = "a1" } - b1 = { path = "b1" } - "#, - ) - .file("src/main.rs", "fn main(){}"); - - for n in 0..30 { - for x in &["a", "b"] { - project = project - .file( - &format!("{x}{n}/Cargo.toml", x = x, n = n), - &format!( - r#" - [package] - name = "{x}{n}" - version = "0.1.0" - [dependencies] - a{np1} = {{ path = "../a{np1}" }} - b{np1} = {{ path = "../b{np1}" }} - "#, - x = x, - n = n, - np1 = n + 1 - ), - ) - .file(&format!("{x}{n}/src/lib.rs", x = x, n = n), ""); - } - } - - let foo = project - .file("a30/Cargo.toml", &basic_lib_manifest("a30")) - .file( - "a30/src/lib.rs", - r#"compile_error!("don't actually build me");"#, - ) - .file("b30/Cargo.toml", &basic_lib_manifest("b30")) - .file("b30/src/lib.rs", "") - .build(); - foo.cargo("build -p foo") - .env("CARGO_BUILD_PIPELINING", "true") - .with_status(101) - .with_stderr_contains("[ERROR] could not compile `a30`[..]") - .run(); -} - -#[cargo_test] -fn forward_rustc_output() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = '2018' - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "bar::foo!();") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - [lib] - proc-macro = true - "#, - ) - .file( - "bar/src/lib.rs", - r#" - extern crate proc_macro; - use proc_macro::*; - - #[proc_macro] - pub fn foo(input: TokenStream) -> TokenStream { - println!("a"); - println!("b"); - println!("{{}}"); - eprintln!("c"); - eprintln!("d"); - eprintln!("{{a"); // "malformed json" - input - } - "#, - ) - .build(); - - foo.cargo("build") - .with_stdout("a\nb\n{}") - .with_stderr( - "\ -[COMPILING] [..] -[COMPILING] [..] -c -d -{a -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_lib_only() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("build --lib -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [..] 
\ - -L dependency=[CWD]/target/debug/deps` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); -} - -#[cargo_test] -fn build_with_no_lib() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --lib") - .with_status(101) - .with_stderr("[ERROR] no library targets found in package `foo`") - .run(); -} - -#[cargo_test] -fn build_with_relative_cargo_home_path() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - - name = "foo" - version = "0.0.1" - authors = ["wycats@example.com"] - - [dependencies] - - "test-dependency" = { path = "src/test_dependency" } - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("src/test_dependency/src/lib.rs", r#" "#) - .file( - "src/test_dependency/Cargo.toml", - &basic_manifest("test-dependency", "0.0.1"), - ) - .build(); - - p.cargo("build").env("CARGO_HOME", "./cargo_home/").run(); -} - -#[cargo_test] -fn user_specific_cfgs_are_filtered_out() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", r#"fn main() {}"#) - .file( - "build.rs", - r#" - fn main() { - assert!(std::env::var_os("CARGO_CFG_PROC_MACRO").is_none()); - assert!(std::env::var_os("CARGO_CFG_DEBUG_ASSERTIONS").is_none()); - } - "#, - ) - .build(); - - p.cargo("rustc -- --cfg debug_assertions --cfg proc_macro") - .run(); - p.process(&p.bin("foo")).run(); -} - -#[cargo_test] -fn close_output() { - // What happens when stdout or stderr is closed during a build. - - // Server to know when rustc has spawned. - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = listener.local_addr().unwrap(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [lib] - proc-macro = true - - [[bin]] - name = "foobar" - "#, - ) - .file( - "src/lib.rs", - &r#" - use proc_macro::TokenStream; - use std::io::Read; - - #[proc_macro] - pub fn repro(_input: TokenStream) -> TokenStream { - println!("hello stdout!"); - eprintln!("hello stderr!"); - // Tell the test we have started. - let mut socket = std::net::TcpStream::connect("__ADDR__").unwrap(); - // Wait for the test to tell us to start printing. - let mut buf = [0]; - drop(socket.read_exact(&mut buf)); - let use_stderr = std::env::var("__CARGO_REPRO_STDERR").is_ok(); - // Emit at least 1MB of data. - // Linux pipes can buffer up to 64KB. - // This test seems to be sensitive to having other threads - // calling fork. My hypothesis is that the stdout/stderr - // file descriptors are duplicated into the child process, - // and during the short window between fork and exec, the - // file descriptor is kept alive long enough for the - // build to finish. It's a half-baked theory, but this - // seems to prevent the spurious errors in CI. - // An alternative solution is to run this test in - // a single-threaded environment. - for i in 0..100000 { - if use_stderr { - eprintln!("0123456789{}", i); - } else { - println!("0123456789{}", i); - } - } - TokenStream::new() - } - "# - .replace("__ADDR__", &addr.to_string()), - ) - .file( - "src/bin/foobar.rs", - r#" - foo::repro!(); - - fn main() {} - "#, - ) - .build(); - - // The `stderr` flag here indicates if this should forcefully close stderr or stdout. 
- let spawn = |stderr: bool| { - let mut cmd = p.cargo("build").build_command(); - cmd.stdout(Stdio::piped()).stderr(Stdio::piped()); - if stderr { - cmd.env("__CARGO_REPRO_STDERR", "1"); - } - let mut child = cmd.spawn().unwrap(); - // Wait for proc macro to start. - let pm_conn = listener.accept().unwrap().0; - // Close stderr or stdout. - if stderr { - drop(child.stderr.take()); - } else { - drop(child.stdout.take()); - } - // Tell the proc-macro to continue; - drop(pm_conn); - // Read the output from the other channel. - let out: &mut dyn Read = if stderr { - child.stdout.as_mut().unwrap() - } else { - child.stderr.as_mut().unwrap() - }; - let mut result = String::new(); - out.read_to_string(&mut result).unwrap(); - let status = child.wait().unwrap(); - assert!(!status.success()); - result - }; - - let stderr = spawn(false); - compare::match_unordered( - "\ -[COMPILING] foo [..] -hello stderr! -[ERROR] [..] -[WARNING] build failed, waiting for other jobs to finish... -[ERROR] [..] -", - &stderr, - None, - ) - .unwrap(); - - // Try again with stderr. - p.build_dir().rm_rf(); - let stdout = spawn(true); - assert_eq!(stdout, "hello stdout!\n"); -} - -#[cargo_test] -fn close_output_during_drain() { - // Test to close the output during the build phase (drain_the_queue). - // There was a bug where it would hang. - - // Server to know when rustc has spawned. - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = listener.local_addr().unwrap(); - - // Create a wrapper so the test can know when compiling has started. - let rustc_wrapper = { - let p = project() - .at("compiler") - .file("Cargo.toml", &basic_manifest("compiler", "1.0.0")) - .file( - "src/main.rs", - &r#" - use std::process::Command; - use std::env; - use std::io::Read; - - fn main() { - // Only wait on the first dependency. - if matches!(env::var("CARGO_PKG_NAME").as_deref(), Ok("dep")) { - let mut socket = std::net::TcpStream::connect("__ADDR__").unwrap(); - // Wait for the test to tell us to start printing. - let mut buf = [0]; - drop(socket.read_exact(&mut buf)); - } - let mut cmd = Command::new("rustc"); - for arg in env::args_os().skip(1) { - cmd.arg(arg); - } - std::process::exit(cmd.status().unwrap().code().unwrap()); - } - "# - .replace("__ADDR__", &addr.to_string()), - ) - .build(); - p.cargo("build").run(); - p.bin("compiler") - }; - - Package::new("dep", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Spawn cargo, wait for the first rustc to start, and then close stderr. - let mut cmd = process(&cargo_exe()) - .arg("check") - .cwd(p.root()) - .env("RUSTC", rustc_wrapper) - .build_command(); - cmd.stdout(Stdio::piped()).stderr(Stdio::piped()); - let mut child = cmd.spawn().expect("cargo should spawn"); - // Wait for the rustc wrapper to start. - let rustc_conn = listener.accept().unwrap().0; - // Close stderr to force an error. - drop(child.stderr.take()); - // Tell the wrapper to continue. 
- drop(rustc_conn); - match child.wait() { - Ok(status) => assert!(!status.success()), - Err(e) => panic!("child wait failed: {}", e), - } -} - -use cargo_test_support::registry::Dependency; - -#[cargo_test] -fn reduced_reproduction_8249() { - // https://github.com/rust-lang/cargo/issues/8249 - Package::new("a-src", "0.1.0").links("a").publish(); - Package::new("a-src", "0.2.0").links("a").publish(); - - Package::new("b", "0.1.0") - .add_dep(Dependency::new("a-src", "0.1").optional(true)) - .publish(); - Package::new("b", "0.2.0") - .add_dep(Dependency::new("a-src", "0.2").optional(true)) - .publish(); - - Package::new("c", "1.0.0") - .add_dep(&Dependency::new("b", "0.1.0")) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - b = { version = "*", features = ["a-src"] } - a-src = "*" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("generate-lockfile").run(); - cargo_util::paths::append(&p.root().join("Cargo.toml"), b"c = \"*\"").unwrap(); - p.cargo("check").run(); - p.cargo("check").run(); -} - -#[cargo_test] -fn target_directory_backup_exclusion() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - // Newly created target/ should have CACHEDIR.TAG inside... - p.cargo("build").run(); - let cachedir_tag = p.build_dir().join("CACHEDIR.TAG"); - assert!(cachedir_tag.is_file()); - assert!(fs::read_to_string(&cachedir_tag) - .unwrap() - .starts_with("Signature: 8a477f597d28d172789f06886806bc55")); - // ...but if target/ already exists CACHEDIR.TAG should not be created in it. - fs::remove_file(&cachedir_tag).unwrap(); - p.cargo("build").run(); - assert!(!&cachedir_tag.is_file()); -} - -#[cargo_test] -fn simple_terminal_width() { - if !is_nightly() { - // --terminal-width is unstable - return; - } - let p = project() - .file( - "src/lib.rs", - r#" - fn main() { - let _: () = 42; - } - "#, - ) - .build(); - - p.cargo("build -Zterminal-width=20") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains("3 | ..._: () = 42;") - .run(); -} - -#[cargo_test] -fn build_script_o0_default() { - let p = project() - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - - p.cargo("build -v --release") - .with_stderr_does_not_contain("[..]build_script_build[..]opt-level[..]") - .run(); -} - -#[cargo_test] -fn build_script_o0_default_even_with_release() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.release] - opt-level = 1 - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - - p.cargo("build -v --release") - .with_stderr_does_not_contain("[..]build_script_build[..]opt-level[..]") - .run(); -} - -#[cargo_test] -fn primary_package_env_var() { - // Test that CARGO_PRIMARY_PACKAGE is enabled only for "foo" and not for any dependency. 
- - let is_primary_package = r#" - pub fn is_primary_package() -> bool {{ - option_env!("CARGO_PRIMARY_PACKAGE").is_some() - }} - "#; - - Package::new("qux", "0.1.0") - .file("src/lib.rs", is_primary_package) - .publish(); - - let baz = git::new("baz", |project| { - project - .file("Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("src/lib.rs", is_primary_package) - }); - - let foo = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = {{ path = "bar" }} - baz = {{ git = '{}' }} - qux = "0.1" - "#, - baz.url() - ), - ) - .file( - "src/lib.rs", - &format!( - r#" - extern crate bar; - extern crate baz; - extern crate qux; - - {} - - #[test] - fn verify_primary_package() {{ - assert!(!bar::is_primary_package()); - assert!(!baz::is_primary_package()); - assert!(!qux::is_primary_package()); - assert!(is_primary_package()); - }} - "#, - is_primary_package - ), - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", is_primary_package) - .build(); - - foo.cargo("test").run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_plan.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_plan.rs deleted file mode 100644 index c8f398d5c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_plan.rs +++ /dev/null @@ -1,222 +0,0 @@ -//! Tests for --build-plan feature. - -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_bin_manifest, basic_manifest, main_file, project}; - -#[cargo_test] -fn cargo_build_plan_simple() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("build --build-plan -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_json( - r#" - { - "inputs": [ - "[..]/foo/Cargo.toml" - ], - "invocations": [ - { - "args": "{...}", - "cwd": "[..]/cit/[..]/foo", - "deps": [], - "env": "{...}", - "kind": null, - "links": "{...}", - "outputs": "{...}", - "package_name": "foo", - "package_version": "0.5.0", - "program": "rustc", - "target_kind": ["bin"], - "compile_mode": "build" - } - ] - } - "#, - ) - .run(); - assert!(!p.bin("foo").is_file()); -} - -#[cargo_test] -fn cargo_build_plan_single_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.5.0" - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate bar; - pub fn foo() { bar::bar(); } - - #[test] - fn test() { foo(); } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - p.cargo("build --build-plan -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_json( - r#" - { - "inputs": [ - "[..]/foo/Cargo.toml", - "[..]/foo/bar/Cargo.toml" - ], - "invocations": [ - { - "args": "{...}", - "cwd": "[..]/cit/[..]/foo", - "deps": [], - "env": "{...}", - "kind": null, - "links": "{...}", - "outputs": [ - "[..]/foo/target/debug/deps/libbar-[..].rlib", - "[..]/foo/target/debug/deps/libbar-[..].rmeta" - ], - "package_name": "bar", - "package_version": "0.0.1", - "program": "rustc", - "target_kind": ["lib"], - "compile_mode": "build" - }, - { - "args": "{...}", - "cwd": "[..]/cit/[..]/foo", - "deps": [0], - "env": "{...}", - "kind": null, - "links": "{...}", - "outputs": [ - "[..]/foo/target/debug/deps/libfoo-[..].rlib", - 
"[..]/foo/target/debug/deps/libfoo-[..].rmeta" - ], - "package_name": "foo", - "package_version": "0.5.0", - "program": "rustc", - "target_kind": ["lib"], - "compile_mode": "build" - } - ] - } - "#, - ) - .run(); -} - -#[cargo_test] -fn cargo_build_plan_build_script() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - "#, - ) - .file("src/main.rs", r#"fn main() {}"#) - .file("build.rs", r#"fn main() {}"#) - .build(); - - p.cargo("build --build-plan -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_json( - r#" - { - "inputs": [ - "[..]/foo/Cargo.toml" - ], - "invocations": [ - { - "args": "{...}", - "cwd": "[..]/cit/[..]/foo", - "deps": [], - "env": "{...}", - "kind": null, - "links": "{...}", - "outputs": "{...}", - "package_name": "foo", - "package_version": "0.5.0", - "program": "rustc", - "target_kind": ["custom-build"], - "compile_mode": "build" - }, - { - "args": "{...}", - "cwd": "[..]/cit/[..]/foo", - "deps": [0], - "env": "{...}", - "kind": null, - "links": "{...}", - "outputs": [], - "package_name": "foo", - "package_version": "0.5.0", - "program": "[..]/build-script-build", - "target_kind": ["custom-build"], - "compile_mode": "run-custom-build" - }, - { - "args": "{...}", - "cwd": "[..]/cit/[..]/foo", - "deps": [1], - "env": "{...}", - "kind": null, - "links": "{...}", - "outputs": "{...}", - "package_name": "foo", - "package_version": "0.5.0", - "program": "rustc", - "target_kind": ["bin"], - "compile_mode": "build" - } - ] - } - "#, - ) - .run(); -} - -#[cargo_test] -fn build_plan_with_dev_dep() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dev-dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build --build-plan -Zunstable-options") - .masquerade_as_nightly_cargo() - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_script.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_script.rs deleted file mode 100644 index 40d8067d5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_script.rs +++ /dev/null @@ -1,4909 +0,0 @@ -//! Tests for build.rs scripts. 
- -use cargo_test_support::compare::assert_match_exact; -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::Package; -use cargo_test_support::tools; -use cargo_test_support::{basic_manifest, cross_compile, is_coarse_mtime, project}; -use cargo_test_support::{rustc_host, sleep_ms, slow_cpu_multiplier, symlink_supported}; -use cargo_util::paths::remove_dir_all; -use std::env; -use std::fs; -use std::io; -use std::thread; - -#[cargo_test] -fn custom_build_script_failed() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("build.rs", "fn main() { std::process::exit(101); }") - .build(); - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]` -[RUNNING] `[..]/build-script-build` -[ERROR] failed to run custom build command for `foo v0.5.0 ([CWD])` - -Caused by: - process didn't exit successfully: `[..]/build-script-build` (exit [..]: 101)", - ) - .run(); -} - -#[cargo_test] -fn custom_build_env_vars() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [features] - bar_feat = ["bar/foo"] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - - [features] - foo = [] - "#, - ) - .file("bar/src/lib.rs", "pub fn hello() {}"); - - let file_content = format!( - r#" - use std::env; - use std::path::Path; - - fn main() {{ - let _target = env::var("TARGET").unwrap(); - let _ncpus = env::var("NUM_JOBS").unwrap(); - let _dir = env::var("CARGO_MANIFEST_DIR").unwrap(); - - let opt = env::var("OPT_LEVEL").unwrap(); - assert_eq!(opt, "0"); - - let opt = env::var("PROFILE").unwrap(); - assert_eq!(opt, "debug"); - - let debug = env::var("DEBUG").unwrap(); - assert_eq!(debug, "true"); - - let out = env::var("OUT_DIR").unwrap(); - assert!(out.starts_with(r"{0}")); - assert!(Path::new(&out).is_dir()); - - let _host = env::var("HOST").unwrap(); - - let _feat = env::var("CARGO_FEATURE_FOO").unwrap(); - - let _cargo = env::var("CARGO").unwrap(); - - let rustc = env::var("RUSTC").unwrap(); - assert_eq!(rustc, "rustc"); - - let rustdoc = env::var("RUSTDOC").unwrap(); - assert_eq!(rustdoc, "rustdoc"); - - assert!(env::var("RUSTC_WRAPPER").is_err()); - assert!(env::var("RUSTC_WORKSPACE_WRAPPER").is_err()); - - assert!(env::var("RUSTC_LINKER").is_err()); - - assert!(env::var("RUSTFLAGS").is_err()); - let rustflags = env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(); - assert_eq!(rustflags, ""); - }} - "#, - p.root() - .join("target") - .join("debug") - .join("build") - .display(), - ); - - let p = p.file("bar/build.rs", &file_content).build(); - - p.cargo("build --features bar_feat").run(); -} - -#[cargo_test] -fn custom_build_env_var_rustflags() { - let rustflags = "--cfg=special"; - let rustflags_alt = "--cfg=notspecial"; - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [build] - rustflags = ["{}"] - "#, - rustflags - ), - ) - .file( - "build.rs", - &format!( - r#" - use std::env; - - fn main() {{ - // Static assertion that exactly one of the cfg paths is always taken. 
- assert!(env::var("RUSTFLAGS").is_err()); - let x; - #[cfg(special)] - {{ assert_eq!(env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(), "{}"); x = String::new(); }} - #[cfg(notspecial)] - {{ assert_eq!(env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(), "{}"); x = String::new(); }} - let _ = x; - }} - "#, - rustflags, rustflags_alt, - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check").run(); - - // RUSTFLAGS overrides build.rustflags, so --cfg=special shouldn't be passed - p.cargo("check").env("RUSTFLAGS", rustflags_alt).run(); -} - -#[cargo_test] -fn custom_build_env_var_encoded_rustflags() { - // NOTE: We use "-Clink-arg=-B nope" here rather than, say, "-A missing_docs", since for the - // latter it won't matter if the whitespace accidentally gets split, as rustc will do the right - // thing either way. - let p = project() - .file( - ".cargo/config", - r#" - [build] - rustflags = ["-Clink-arg=-B nope", "--cfg=foo"] - "#, - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() {{ - assert_eq!(env::var("CARGO_ENCODED_RUSTFLAGS").unwrap(), "-Clink-arg=-B nope\x1f--cfg=foo"); - }} - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check").run(); -} - -#[cargo_test] -fn custom_build_env_var_rustc_wrapper() { - let wrapper = tools::echo_wrapper(); - let p = project() - .file( - "build.rs", - r#" - use std::env; - - fn main() {{ - assert_eq!( - env::var("RUSTC_WRAPPER").unwrap(), - env::var("CARGO_RUSTC_WRAPPER_CHECK").unwrap() - ); - }} - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .env("CARGO_BUILD_RUSTC_WRAPPER", &wrapper) - .env("CARGO_RUSTC_WRAPPER_CHECK", &wrapper) - .run(); -} - -#[cargo_test] -fn custom_build_env_var_rustc_workspace_wrapper() { - let wrapper = tools::echo_wrapper(); - - // Workspace wrapper should be set for any crate we're operating directly on. - let p = project() - .file( - "build.rs", - r#" - use std::env; - - fn main() {{ - assert_eq!( - env::var("RUSTC_WORKSPACE_WRAPPER").unwrap(), - env::var("CARGO_RUSTC_WORKSPACE_WRAPPER_CHECK").unwrap() - ); - }} - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .env("CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER", &wrapper) - .env("CARGO_RUSTC_WORKSPACE_WRAPPER_CHECK", &wrapper) - .run(); - - // But should not be set for a crate from the registry, as then it's not in a workspace. - Package::new("bar", "0.1.0") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - links = "a" - "#, - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() {{ - assert!(env::var("RUSTC_WORKSPACE_WRAPPER").is_err()); - }} - "#, - ) - .file("src/lib.rs", "") - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .env("CARGO_BUILD_RUSTC_WORKSPACE_WRAPPER", &wrapper) - .run(); -} - -#[cargo_test] -fn custom_build_env_var_rustc_linker() { - if cross_compile::disabled() { - return; - } - let target = cross_compile::alternate(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - linker = "/path/to/linker" - "#, - target - ), - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker")); - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // no crate type set => linker never called => build succeeds if and - // only if build.rs succeeds, despite linker binary not existing. 
- p.cargo("build --target").arg(&target).run(); -} - -#[cargo_test] -fn custom_build_env_var_rustc_linker_bad_host_target() { - let target = rustc_host(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - linker = "/path/to/linker" - "#, - target - ), - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker")); - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // build.rs should fail since host == target when no target is set - p.cargo("build --verbose") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/linker [..]` -[ERROR] linker `[..]/path/to/linker` not found -" - ) - .run(); -} - -#[cargo_test] -fn custom_build_env_var_rustc_linker_host_target() { - let target = rustc_host(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - target-applies-to-host = false - [target.{}] - linker = "/path/to/linker" - "#, - target - ), - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker")); - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // no crate type set => linker never called => build succeeds if and - // only if build.rs succeeds, despite linker binary not existing. - if cargo_test_support::is_nightly() { - p.cargo("build -Z target-applies-to-host --target") - .arg(&target) - .masquerade_as_nightly_cargo() - .run(); - } -} - -#[cargo_test] -fn custom_build_env_var_rustc_linker_host_target_env() { - let target = rustc_host(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - linker = "/path/to/linker" - "#, - target - ), - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker")); - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // no crate type set => linker never called => build succeeds if and - // only if build.rs succeeds, despite linker binary not existing. 
- if cargo_test_support::is_nightly() { - p.cargo("build -Z target-applies-to-host --target") - .env("CARGO_TARGET_APPLIES_TO_HOST", "false") - .arg(&target) - .masquerade_as_nightly_cargo() - .run(); - } -} - -#[cargo_test] -fn custom_build_invalid_host_config_feature_flag() { - let target = rustc_host(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - linker = "/path/to/linker" - "#, - target - ), - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/linker")); - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // build.rs should fail due to -Zhost-config being set without -Ztarget-applies-to-host - if cargo_test_support::is_nightly() { - p.cargo("build -Z host-config --target") - .arg(&target) - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains( - "\ -error: the -Zhost-config flag requires the -Ztarget-applies-to-host flag to be set -", - ) - .run(); - } -} - -#[cargo_test] -fn custom_build_env_var_rustc_linker_host_target_with_bad_host_config() { - let target = rustc_host(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - target-applies-to-host = true - [host] - linker = "/path/to/host/linker" - [target.{}] - linker = "/path/to/target/linker" - "#, - target - ), - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/target/linker")); - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // build.rs should fail due to bad target linker being set - if cargo_test_support::is_nightly() { - p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target") - .arg(&target) - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/target/linker [..]` -[ERROR] linker `[..]/path/to/target/linker` not found -" - ) - .run(); - } -} - -#[cargo_test] -fn custom_build_env_var_rustc_linker_bad_host() { - let target = rustc_host(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [host] - linker = "/path/to/host/linker" - [target.{}] - linker = "/path/to/target/linker" - "#, - target - ), - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/target/linker")); - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // build.rs should fail due to bad host linker being set - if cargo_test_support::is_nightly() { - p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target") - .arg(&target) - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/linker [..]` -[ERROR] linker `[..]/path/to/host/linker` not found -" - ) - .run(); - } -} - -#[cargo_test] -fn custom_build_env_var_rustc_linker_bad_host_with_arch() { - let target = rustc_host(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [host] - linker = "/path/to/host/linker" - [host.{}] - linker = "/path/to/host/arch/linker" - [target.{}] - linker = "/path/to/target/linker" - "#, - target, target - ), - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/target/linker")); - } - "#, - 
) - .file("src/lib.rs", "") - .build(); - - // build.rs should fail due to bad host linker being set - if cargo_test_support::is_nightly() { - p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target") - .arg(&target) - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/arch/linker [..]` -[ERROR] linker `[..]/path/to/host/arch/linker` not found -" - ) - .run(); - } -} - -#[cargo_test] -fn custom_build_env_var_rustc_linker_cross_arch_host() { - let target = rustc_host(); - let cross_target = cross_compile::alternate(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [host.{}] - linker = "/path/to/host/arch/linker" - [target.{}] - linker = "/path/to/target/linker" - "#, - cross_target, target - ), - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/target/linker")); - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // build.rs should fail due to bad host linker being set - if cargo_test_support::is_nightly() { - p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target") - .arg(&target) - .masquerade_as_nightly_cargo() - .run(); - } -} - -#[cargo_test] -fn custom_build_env_var_rustc_linker_bad_cross_arch_host() { - let target = rustc_host(); - let cross_target = cross_compile::alternate(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [host] - linker = "/path/to/host/linker" - [host.{}] - linker = "/path/to/host/arch/linker" - [target.{}] - linker = "/path/to/target/linker" - "#, - cross_target, target - ), - ) - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("RUSTC_LINKER").unwrap().ends_with("/path/to/target/linker")); - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // build.rs should fail due to bad host linker being set - if cargo_test_support::is_nightly() { - p.cargo("build -Z target-applies-to-host -Z host-config --verbose --target") - .arg(&target) - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin [..]-C linker=[..]/path/to/host/linker [..]` -[ERROR] linker `[..]/path/to/host/linker` not found -" - ) - .run(); - } -} - -#[cargo_test] -fn custom_build_script_wrong_rustc_flags() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "build.rs", - r#"fn main() { println!("cargo:rustc-flags=-aaa -bbb"); }"#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - "[ERROR] Only `-l` and `-L` flags are allowed in build script of `foo v0.5.0 ([CWD])`: \ - `-aaa -bbb`", - ) - .run(); -} - -#[cargo_test] -fn custom_build_script_rustc_flags() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.foo] - path = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "foo/Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - "#, - ) - .file("foo/src/lib.rs", "") - .file( - "foo/build.rs", - r#" - fn main() { - 
println!("cargo:rustc-flags=-l nonexistinglib -L /dummy/path1 -L /dummy/path2"); - } - "#, - ) - .build(); - - p.cargo("build --verbose") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name build_script_build foo/build.rs [..] -[RUNNING] `[..]build-script-build` -[RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\ - -L dependency=[CWD]/target/debug/deps \ - -L /dummy/path1 -L /dummy/path2 -l nonexistinglib` -[COMPILING] bar [..] -[RUNNING] `rustc --crate-name bar src/main.rs [..]\ - -L dependency=[CWD]/target/debug/deps \ - --extern foo=[..]libfoo-[..] \ - -L /dummy/path1 -L /dummy/path2` -[FINISHED] dev [..] -", - ) - .run(); -} - -#[cargo_test] -fn custom_build_script_rustc_flags_no_space() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.foo] - path = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "foo/Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - "#, - ) - .file("foo/src/lib.rs", "") - .file( - "foo/build.rs", - r#" - fn main() { - println!("cargo:rustc-flags=-lnonexistinglib -L/dummy/path1 -L/dummy/path2"); - } - "#, - ) - .build(); - - p.cargo("build --verbose") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name build_script_build foo/build.rs [..] -[RUNNING] `[..]build-script-build` -[RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\ - -L dependency=[CWD]/target/debug/deps \ - -L /dummy/path1 -L /dummy/path2 -l nonexistinglib` -[COMPILING] bar [..] -[RUNNING] `rustc --crate-name bar src/main.rs [..]\ - -L dependency=[CWD]/target/debug/deps \ - --extern foo=[..]libfoo-[..] \ - -L /dummy/path1 -L /dummy/path2` -[FINISHED] dev [..] -", - ) - .run(); -} - -#[cargo_test] -fn links_no_build_cmd() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "a" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` - -Caused by: - package `foo v0.5.0 ([CWD])` specifies that it links to `a` but does \ -not have a custom build script -", - ) - .run(); -} - -#[cargo_test] -fn links_duplicates() { - // this tests that the links_duplicates are caught at resolver time - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "a" - build = "build.rs" - - [dependencies.a-sys] - path = "a-sys" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "") - .file( - "a-sys/Cargo.toml", - r#" - [project] - name = "a-sys" - version = "0.5.0" - authors = [] - links = "a" - build = "build.rs" - "#, - ) - .file("a-sys/src/lib.rs", "") - .file("a-sys/build.rs", "") - .build(); - - p.cargo("build").with_status(101) - .with_stderr("\ -error: failed to select a version for `a-sys`. - ... required by package `foo v0.5.0 ([..])` -versions that meet the requirements `*` are: 0.5.0 - -the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: -package `foo v0.5.0 ([..])` -Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a-sys' value. 
For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links. - -failed to select a version for `a-sys` which could resolve this conflict -").run(); -} - -#[cargo_test] -fn links_duplicates_old_registry() { - // Test old links validator. See `validate_links`. - Package::new("bar", "0.1.0") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - links = "a" - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - links = "a" - - [dependencies] - bar = "0.1" - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 ([..]) -[ERROR] multiple packages link to native library `a`, \ - but a native library can be linked only once - -package `bar v0.1.0` - ... which satisfies dependency `bar = \"^0.1\"` (locked to 0.1.0) of package `foo v0.1.0 ([..]foo)` -links to native library `a` - -package `foo v0.1.0 ([..]foo)` -also links to native library `a` -", - ) - .run(); -} - -#[cargo_test] -fn links_duplicates_deep_dependency() { - // this tests that the links_duplicates are caught at resolver time - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "a" - build = "build.rs" - - [dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - build = "build.rs" - - [dependencies.a-sys] - path = "a-sys" - "#, - ) - .file("a/src/lib.rs", "") - .file("a/build.rs", "") - .file( - "a/a-sys/Cargo.toml", - r#" - [project] - name = "a-sys" - version = "0.5.0" - authors = [] - links = "a" - build = "build.rs" - "#, - ) - .file("a/a-sys/src/lib.rs", "") - .file("a/a-sys/build.rs", "") - .build(); - - p.cargo("build").with_status(101) - .with_stderr("\ -error: failed to select a version for `a-sys`. - ... required by package `a v0.5.0 ([..])` - ... which satisfies path dependency `a` of package `foo v0.5.0 ([..])` -versions that meet the requirements `*` are: 0.5.0 - -the package `a-sys` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: -package `foo v0.5.0 ([..])` -Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a-sys' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links. 
- -failed to select a version for `a-sys` which could resolve this conflict -").run(); -} - -#[cargo_test] -fn overrides_and_links() { - let target = rustc_host(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - fn main() { - assert_eq!(env::var("DEP_FOO_FOO").ok().expect("FOO missing"), - "bar"); - assert_eq!(env::var("DEP_FOO_BAR").ok().expect("BAR missing"), - "baz"); - } - "#, - ) - .file( - ".cargo/config", - &format!( - r#" - [target.{}.foo] - rustc-flags = "-L foo -L bar" - foo = "bar" - bar = "baz" - "#, - target - ), - ) - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#, - ) - .file("a/src/lib.rs", "") - .file("a/build.rs", "not valid rust code") - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[..] -[..] -[..] -[..] -[..] -[RUNNING] `rustc --crate-name foo [..] -L foo -L bar` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn unused_overrides() { - let target = rustc_host(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .file( - ".cargo/config", - &format!( - r#" - [target.{}.foo] - rustc-flags = "-L foo -L bar" - foo = "bar" - bar = "baz" - "#, - target - ), - ) - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn links_passes_env_vars() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - fn main() { - assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); - assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); - } - "#, - ) - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#, - ) - .file("a/src/lib.rs", "") - .file( - "a/build.rs", - r#" - use std::env; - fn main() { - let lib = env::var("CARGO_MANIFEST_LINKS").unwrap(); - assert_eq!(lib, "foo"); - - println!("cargo:foo=bar"); - println!("cargo:bar=baz"); - } - "#, - ) - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn only_rerun_build_script() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - - p.cargo("build -v").run(); - p.root().move_into_the_past(); - - p.change_file("some-new-file", ""); - p.root().move_into_the_past(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `[..]/build-script-build` -[RUNNING] `rustc --crate-name foo [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn rebuild_continues_to_pass_env_vars() { - let a = project() - .at("a") - .file( - "Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::time::Duration; - fn main() { - println!("cargo:foo=bar"); - println!("cargo:bar=baz"); - std::thread::sleep(Duration::from_millis(500)); - } - "#, - ) - .build(); - a.root().move_into_the_past(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [dependencies.a] - path = '{}' - "#, - a.root().display() - ), - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - fn main() { - assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar"); - assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz"); - } - "#, - ) - .build(); - - p.cargo("build -v").run(); - p.root().move_into_the_past(); - - p.change_file("some-new-file", ""); - p.root().move_into_the_past(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn testing_and_such() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - - println!("build"); - p.cargo("build -v").run(); - p.root().move_into_the_past(); - - p.change_file("src/lib.rs", ""); - p.root().move_into_the_past(); - - println!("test"); - p.cargo("test -vj1") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `[..]/build-script-build` -[RUNNING] `rustc --crate-name foo [..]` -[RUNNING] `rustc --crate-name foo [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]/foo-[..][EXE]` -[DOCTEST] foo -[RUNNING] `rustdoc [..]--test [..]`", - ) - .with_stdout_contains_n("running 0 tests", 2) - .run(); - - println!("doc"); - p.cargo("doc -v") - .with_stderr( - "\ -[DOCUMENTING] foo v0.5.0 ([CWD]) -[RUNNING] `rustdoc [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.change_file("src/main.rs", "fn main() {}"); - println!("run"); - p.cargo("run") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/foo[EXE]` -", - ) - .run(); -} - -#[cargo_test] -fn propagation_of_l_flags() { - let target = rustc_host(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "bar" - build = "build.rs" - - [dependencies.b] - path = "../b" - "#, - ) - .file("a/src/lib.rs", "") - .file( - "a/build.rs", - r#"fn main() { println!("cargo:rustc-flags=-L bar"); }"#, - ) - .file( - "b/Cargo.toml", - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#, - ) - .file("b/src/lib.rs", "") - .file("b/build.rs", "bad file") - .file( - ".cargo/config", - &format!( - r#" - [target.{}.foo] - rustc-flags = "-L foo" - "#, - target - ), - ) - .build(); - - p.cargo("build -v -j1") - .with_stderr_contains( - "\ -[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]` -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc --crate-name foo [..] 
-L bar -L foo` -", - ) - .run(); -} - -#[cargo_test] -fn propagation_of_l_flags_new() { - let target = rustc_host(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "bar" - build = "build.rs" - - [dependencies.b] - path = "../b" - "#, - ) - .file("a/src/lib.rs", "") - .file( - "a/build.rs", - r#" - fn main() { - println!("cargo:rustc-link-search=bar"); - } - "#, - ) - .file( - "b/Cargo.toml", - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#, - ) - .file("b/src/lib.rs", "") - .file("b/build.rs", "bad file") - .file( - ".cargo/config", - &format!( - r#" - [target.{}.foo] - rustc-link-search = ["foo"] - "#, - target - ), - ) - .build(); - - p.cargo("build -v -j1") - .with_stderr_contains( - "\ -[RUNNING] `rustc --crate-name a [..] -L bar[..]-L foo[..]` -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc --crate-name foo [..] -L bar -L foo` -", - ) - .run(); -} - -#[cargo_test] -fn build_deps_simple() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - [build-dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - " - #[allow(unused_extern_crates)] - extern crate a; - fn main() {} - ", - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] a v0.5.0 ([CWD]/a) -[RUNNING] `rustc --crate-name a [..]` -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc [..] build.rs [..] --extern a=[..]` -[RUNNING] `[..]/foo-[..]/build-script-build` -[RUNNING] `rustc --crate-name foo [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_deps_not_for_normal() { - let target = rustc_host(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - [build-dependencies.aaaaa] - path = "a" - "#, - ) - .file( - "src/lib.rs", - "#[allow(unused_extern_crates)] extern crate aaaaa;", - ) - .file( - "build.rs", - " - #[allow(unused_extern_crates)] - extern crate aaaaa; - fn main() {} - ", - ) - .file("a/Cargo.toml", &basic_manifest("aaaaa", "0.5.0")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build -v --target") - .arg(&target) - .with_status(101) - .with_stderr_contains("[..]can't find crate for `aaaaa`[..]") - .with_stderr_contains( - "\ -[ERROR] could not compile `foo` due to previous error - -Caused by: - process didn't exit successfully: [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn build_cmd_with_a_build_cmd() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [build-dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - " - #[allow(unused_extern_crates)] - extern crate a; - fn main() {} - ", - ) - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - build = "build.rs" - - [build-dependencies.b] - path = "../b" - "#, - ) - .file("a/src/lib.rs", "") - .file( - "a/build.rs", - "#[allow(unused_extern_crates)] extern crate b; fn main() {}", - ) - .file("b/Cargo.toml", &basic_manifest("b", "0.5.0")) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] b v0.5.0 ([CWD]/b) -[RUNNING] `rustc --crate-name b [..]` -[COMPILING] a v0.5.0 ([CWD]/a) -[RUNNING] `rustc [..] a/build.rs [..] --extern b=[..]` -[RUNNING] `[..]/a-[..]/build-script-build` -[RUNNING] `rustc --crate-name a [..]lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [..]target/debug/deps \ - -L [..]target/debug/deps` -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc --crate-name build_script_build build.rs [..]--crate-type bin \ - --emit=[..]link[..]\ - -C debuginfo=2 -C metadata=[..] --out-dir [..] \ - -L [..]target/debug/deps \ - --extern a=[..]liba[..].rlib` -[RUNNING] `[..]/foo-[..]/build-script-build` -[RUNNING] `rustc --crate-name foo [..]lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [..] \ - -L [..]target/debug/deps` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn out_dir_is_preserved() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - use std::fs::File; - use std::path::Path; - fn main() { - let out = env::var("OUT_DIR").unwrap(); - File::create(Path::new(&out).join("foo")).unwrap(); - } - "#, - ) - .build(); - - // Make the file - p.cargo("build -v").run(); - - // Change to asserting that it's there - p.change_file( - "build.rs", - r#" - use std::env; - use std::fs::File; - use std::path::Path; - fn main() { - let out = env::var("OUT_DIR").unwrap(); - File::open(&Path::new(&out).join("foo")).unwrap(); - } - "#, - ); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name build_script_build [..] -[RUNNING] `[..]/build-script-build` -[RUNNING] `rustc --crate-name foo [..] -[FINISHED] [..] -", - ) - .run(); - - // Run a fresh build where file should be preserved - p.cargo("build -v") - .with_stderr( - "\ -[FRESH] foo [..] -[FINISHED] [..] -", - ) - .run(); - - // One last time to make sure it's still there. - p.change_file("foo", ""); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `[..]build-script-build` -[RUNNING] `rustc --crate-name foo [..] -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn output_separate_lines() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-flags=-L foo"); - println!("cargo:rustc-flags=-l static=foo"); - } - "#, - ) - .build(); - p.cargo("build -v") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc [..] build.rs [..]` -[RUNNING] `[..]/foo-[..]/build-script-build` -[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo` -[ERROR] could not find native static library [..] -", - ) - .run(); -} - -#[cargo_test] -fn output_separate_lines_new() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-link-search=foo"); - println!("cargo:rustc-link-lib=static=foo"); - } - "#, - ) - .build(); - p.cargo("build -v") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc [..] build.rs [..]` -[RUNNING] `[..]/foo-[..]/build-script-build` -[RUNNING] `rustc --crate-name foo [..] -L foo -l static=foo` -[ERROR] could not find native static library [..] -", - ) - .run(); -} - -#[cfg(not(windows))] // FIXME(#867) -#[cargo_test] -fn code_generation() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "src/main.rs", - r#" - include!(concat!(env!("OUT_DIR"), "/hello.rs")); - - fn main() { - println!("{}", message()); - } - "#, - ) - .file( - "build.rs", - r#" - use std::env; - use std::fs; - use std::path::PathBuf; - - fn main() { - let dst = PathBuf::from(env::var("OUT_DIR").unwrap()); - fs::write(dst.join("hello.rs"), - " - pub fn message() -> &'static str { - \"Hello, World!\" - } - ") - .unwrap(); - } - "#, - ) - .build(); - - p.cargo("run") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-[RUNNING] `target/debug/foo`", - ) - .with_stdout("Hello, World!") - .run(); - - p.cargo("test").run(); -} - -#[cargo_test] -fn release_with_build_script() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() {} - "#, - ) - .build(); - - p.cargo("build -v --release").run(); -} - -#[cargo_test] -fn build_script_only() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - "#, - ) - .file("build.rs", r#"fn main() {}"#) - .build(); - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - no targets specified in the manifest - either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present", - ) - .run(); -} - -#[cargo_test] -fn shared_dep_with_a_build_script() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [dependencies.a] - path = "a" - - [build-dependencies.b] - path = "b" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("a/build.rs", "fn main() {}") - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.5.0" - authors = [] - - [dependencies.a] - path = "../a" - "#, - ) - .file("b/src/lib.rs", "") - .build(); - p.cargo("build -v").run(); -} - -#[cargo_test] -fn transitive_dep_host() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [build-dependencies.b] - path = "b" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#, - ) - .file("a/build.rs", "fn main() {}") - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.5.0" - authors = [] - - [lib] - name = "b" - plugin = true - - [dependencies.a] - path = "../a" - "#, - ) - .file("b/src/lib.rs", "") - .build(); - p.cargo("build").run(); -} - -#[cargo_test] -fn test_a_lib_with_a_build_command() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "src/lib.rs", - r#" - include!(concat!(env!("OUT_DIR"), "/foo.rs")); - - /// ``` - /// foo::bar(); - /// ``` - pub fn bar() { - assert_eq!(foo(), 1); - } - "#, - ) - .file( - "build.rs", - r#" - use std::env; - use std::fs; - use std::path::PathBuf; - - fn main() { - let out = PathBuf::from(env::var("OUT_DIR").unwrap()); - fs::write(out.join("foo.rs"), "fn foo() -> i32 { 1 }").unwrap(); - } - "#, - ) - .build(); - p.cargo("test").run(); -} - -#[cargo_test] -fn test_dev_dep_build_script() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dev-dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("a/build.rs", "fn main() {}") - .file("a/src/lib.rs", "") - .build(); - - p.cargo("test").run(); -} - 
-#[cargo_test] -fn build_script_with_dynamic_native_dependency() { - let build = project() - .at("builder") - .file( - "Cargo.toml", - r#" - [package] - name = "builder" - version = "0.0.1" - authors = [] - - [lib] - name = "builder" - crate-type = ["dylib"] - "#, - ) - .file("src/lib.rs", "#[no_mangle] pub extern fn foo() {}") - .build(); - - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [build-dependencies.bar] - path = "bar" - "#, - ) - .file("build.rs", "extern crate bar; fn main() { bar::bar() }") - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file( - "bar/build.rs", - r#" - use std::env; - use std::fs; - use std::path::PathBuf; - - fn main() { - let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); - let root = PathBuf::from(env::var("BUILDER_ROOT").unwrap()); - let file = format!("{}builder{}", - env::consts::DLL_PREFIX, - env::consts::DLL_SUFFIX); - let src = root.join(&file); - let dst = out_dir.join(&file); - fs::copy(src, dst).unwrap(); - if cfg!(target_env = "msvc") { - fs::copy(root.join("builder.dll.lib"), - out_dir.join("builder.dll.lib")).unwrap(); - } - println!("cargo:rustc-link-search=native={}", out_dir.display()); - } - "#, - ) - .file( - "bar/src/lib.rs", - r#" - pub fn bar() { - #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))] - #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))] - extern { fn foo(); } - unsafe { foo() } - } - "#, - ) - .build(); - - build - .cargo("build -v") - .env("CARGO_LOG", "cargo::ops::cargo_rustc") - .run(); - - let root = build.root().join("target").join("debug"); - foo.cargo("build -v") - .env("BUILDER_ROOT", root) - .env("CARGO_LOG", "cargo::ops::cargo_rustc") - .run(); -} - -#[cargo_test] -fn profile_and_opt_level_set_correctly() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert_eq!(env::var("OPT_LEVEL").unwrap(), "3"); - assert_eq!(env::var("PROFILE").unwrap(), "release"); - assert_eq!(env::var("DEBUG").unwrap(), "false"); - } - "#, - ) - .build(); - p.cargo("bench").run(); -} - -#[cargo_test] -fn profile_debug_0() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [profile.dev] - debug = 0 - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert_eq!(env::var("OPT_LEVEL").unwrap(), "0"); - assert_eq!(env::var("PROFILE").unwrap(), "debug"); - assert_eq!(env::var("DEBUG").unwrap(), "false"); - } - "#, - ) - .build(); - p.cargo("build").run(); -} - -#[cargo_test] -fn build_script_with_lto() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [profile.dev] - lto = true - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - p.cargo("build").run(); -} - -#[cargo_test] -fn test_duplicate_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - build = "build.rs" - - [dependencies.bar] - path = "bar" - - [build-dependencies.bar] - path = "bar" - "#, - ) - .file( - "src/main.rs", - r#" - extern crate bar; - fn main() { bar::do_nothing() } - "#, - ) - 
.file( - "build.rs", - r#" - extern crate bar; - fn main() { bar::do_nothing() } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn do_nothing() {}") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn cfg_feedback() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file("src/main.rs", "#[cfg(foo)] fn main() {}") - .file( - "build.rs", - r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#, - ) - .build(); - p.cargo("build -v").run(); -} - -#[cargo_test] -fn cfg_override() { - let target = rustc_host(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "a" - build = "build.rs" - "#, - ) - .file("src/main.rs", "#[cfg(foo)] fn main() {}") - .file("build.rs", "") - .file( - ".cargo/config", - &format!( - r#" - [target.{}.a] - rustc-cfg = ["foo"] - "#, - target - ), - ) - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn cfg_test() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#, - ) - .file( - "src/lib.rs", - r#" - /// - /// ``` - /// extern crate foo; - /// - /// fn main() { - /// foo::foo() - /// } - /// ``` - /// - #[cfg(foo)] - pub fn foo() {} - - #[cfg(foo)] - #[test] - fn test_foo() { - foo() - } - "#, - ) - .file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}") - .build(); - p.cargo("test -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] [..] build.rs [..] -[RUNNING] `[..]/build-script-build` -[RUNNING] [..] --cfg foo[..] -[RUNNING] [..] --cfg foo[..] -[RUNNING] [..] --cfg foo[..] -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]/foo-[..][EXE]` -[RUNNING] `[..]/test-[..][EXE]` -[DOCTEST] foo -[RUNNING] [..] --cfg foo[..]", - ) - .with_stdout_contains("test test_foo ... ok") - .with_stdout_contains("test test_bar ... ok") - .with_stdout_contains_n("test [..] ... 
ok", 3) - .run(); -} - -#[cargo_test] -fn cfg_doc() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [dependencies.bar] - path = "bar" - "#, - ) - .file( - "build.rs", - r#"fn main() { println!("cargo:rustc-cfg=foo"); }"#, - ) - .file("src/lib.rs", "#[cfg(foo)] pub fn foo() {}") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file( - "bar/build.rs", - r#"fn main() { println!("cargo:rustc-cfg=bar"); }"#, - ) - .file("bar/src/lib.rs", "#[cfg(bar)] pub fn bar() {}") - .build(); - p.cargo("doc").run(); - assert!(p.root().join("target/doc").is_dir()); - assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); - assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); -} - -#[cargo_test] -fn cfg_override_test() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - links = "a" - "#, - ) - .file("build.rs", "") - .file( - ".cargo/config", - &format!( - r#" - [target.{}.a] - rustc-cfg = ["foo"] - "#, - rustc_host() - ), - ) - .file( - "src/lib.rs", - r#" - /// - /// ``` - /// extern crate foo; - /// - /// fn main() { - /// foo::foo() - /// } - /// ``` - /// - #[cfg(foo)] - pub fn foo() {} - - #[cfg(foo)] - #[test] - fn test_foo() { - foo() - } - "#, - ) - .file("tests/test.rs", "#[cfg(foo)] #[test] fn test_bar() {}") - .build(); - p.cargo("test -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `[..]` -[RUNNING] `[..]` -[RUNNING] `[..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]/foo-[..][EXE]` -[RUNNING] `[..]/test-[..][EXE]` -[DOCTEST] foo -[RUNNING] [..] --cfg foo[..]", - ) - .with_stdout_contains("test test_foo ... ok") - .with_stdout_contains("test test_bar ... ok") - .with_stdout_contains_n("test [..] ... 
ok", 3) - .run(); -} - -#[cargo_test] -fn cfg_override_doc() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - links = "a" - - [dependencies.bar] - path = "bar" - "#, - ) - .file( - ".cargo/config", - &format!( - r#" - [target.{target}.a] - rustc-cfg = ["foo"] - [target.{target}.b] - rustc-cfg = ["bar"] - "#, - target = rustc_host() - ), - ) - .file("build.rs", "") - .file("src/lib.rs", "#[cfg(foo)] pub fn foo() {}") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - build = "build.rs" - links = "b" - "#, - ) - .file("bar/build.rs", "") - .file("bar/src/lib.rs", "#[cfg(bar)] pub fn bar() {}") - .build(); - p.cargo("doc").run(); - assert!(p.root().join("target/doc").is_dir()); - assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); - assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); -} - -#[cargo_test] -fn env_build() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file( - "src/main.rs", - r#" - const FOO: &'static str = env!("FOO"); - fn main() { - println!("{}", FOO); - } - "#, - ) - .file( - "build.rs", - r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#, - ) - .build(); - p.cargo("build -v").run(); - p.cargo("run -v").with_stdout("foo\n").run(); -} - -#[cargo_test] -fn env_test() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#, - ) - .file( - "src/lib.rs", - r#"pub const FOO: &'static str = env!("FOO"); "#, - ) - .file( - "tests/test.rs", - r#" - extern crate foo; - - #[test] - fn test_foo() { - assert_eq!("foo", foo::FOO); - } - "#, - ) - .build(); - p.cargo("test -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] [..] build.rs [..] -[RUNNING] `[..]/build-script-build` -[RUNNING] [..] --crate-name foo[..] -[RUNNING] [..] --crate-name foo[..] -[RUNNING] [..] --crate-name test[..] -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]/foo-[..][EXE]` -[RUNNING] `[..]/test-[..][EXE]` -[DOCTEST] foo -[RUNNING] [..] --crate-name foo[..]", - ) - .with_stdout_contains_n("running 0 tests", 2) - .with_stdout_contains("test test_foo ... 
ok") - .run(); -} - -#[cargo_test] -fn env_doc() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file( - "src/main.rs", - r#" - const FOO: &'static str = env!("FOO"); - fn main() {} - "#, - ) - .file( - "build.rs", - r#"fn main() { println!("cargo:rustc-env=FOO=foo"); }"#, - ) - .build(); - p.cargo("doc -v").run(); -} - -#[cargo_test] -fn flags_go_into_tests() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - b = { path = "b" } - "#, - ) - .file("src/lib.rs", "") - .file("tests/foo.rs", "") - .file( - "b/Cargo.toml", - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies] - a = { path = "../a" } - "#, - ) - .file("b/src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("a/src/lib.rs", "") - .file( - "a/build.rs", - r#" - fn main() { - println!("cargo:rustc-link-search=test"); - } - "#, - ) - .build(); - - p.cargo("test -v --test=foo") - .with_stderr( - "\ -[COMPILING] a v0.5.0 ([..] -[RUNNING] `rustc [..] a/build.rs [..]` -[RUNNING] `[..]/build-script-build` -[RUNNING] `rustc [..] a/src/lib.rs [..] -L test[..]` -[COMPILING] b v0.5.0 ([..] -[RUNNING] `rustc [..] b/src/lib.rs [..] -L test[..]` -[COMPILING] foo v0.5.0 ([..] -[RUNNING] `rustc [..] src/lib.rs [..] -L test[..]` -[RUNNING] `rustc [..] tests/foo.rs [..] -L test[..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]/foo-[..][EXE]`", - ) - .with_stdout_contains("running 0 tests") - .run(); - - p.cargo("test -v -pb --lib") - .with_stderr( - "\ -[FRESH] a v0.5.0 ([..] -[COMPILING] b v0.5.0 ([..] -[RUNNING] `rustc [..] b/src/lib.rs [..] -L test[..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]/b-[..][EXE]`", - ) - .with_stdout_contains("running 0 tests") - .run(); -} - -#[cargo_test] -fn diamond_passes_args_only_once() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = { path = "a" } - b = { path = "b" } - "#, - ) - .file("src/lib.rs", "") - .file("tests/foo.rs", "") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - [dependencies] - b = { path = "../b" } - c = { path = "../c" } - "#, - ) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies] - c = { path = "../c" } - "#, - ) - .file("b/src/lib.rs", "") - .file( - "c/Cargo.toml", - r#" - [project] - name = "c" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "c/build.rs", - r#" - fn main() { - println!("cargo:rustc-link-search=native=test"); - } - "#, - ) - .file("c/src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] c v0.5.0 ([..] -[RUNNING] `rustc [..]` -[RUNNING] `[..]` -[RUNNING] `rustc [..]` -[COMPILING] b v0.5.0 ([..] -[RUNNING] `rustc [..]` -[COMPILING] a v0.5.0 ([..] -[RUNNING] `rustc [..]` -[COMPILING] foo v0.5.0 ([..] -[RUNNING] `[..]rmeta -L native=test` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn adding_an_override_invalidates() { - let target = rustc_host(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file(".cargo/config", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-link-search=native=foo"); - } - "#, - ) - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..] -[RUNNING] `rustc [..]` -[RUNNING] `[..]` -[RUNNING] `rustc [..] -L native=foo` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.change_file( - ".cargo/config", - &format!( - " - [target.{}.foo] - rustc-link-search = [\"native=bar\"] - ", - target - ), - ); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..] -[RUNNING] `rustc [..] -L native=bar` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn changing_an_override_invalidates() { - let target = rustc_host(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - &format!( - " - [target.{}.foo] - rustc-link-search = [\"native=foo\"] - ", - target - ), - ) - .file("build.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..] -[RUNNING] `rustc [..] -L native=foo` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.change_file( - ".cargo/config", - &format!( - " - [target.{}.foo] - rustc-link-search = [\"native=bar\"] - ", - target - ), - ); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..] -[RUNNING] `rustc [..] -L native=bar` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn fresh_builds_possible_with_link_libs() { - // The bug is non-deterministic. Sometimes you can get a fresh build - let target = rustc_host(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "nativefoo" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - &format!( - " - [target.{}.nativefoo] - rustc-link-lib = [\"a\"] - rustc-link-search = [\"./b\"] - rustc-flags = \"-l z -L ./\" - ", - target - ), - ) - .file("build.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..] -[RUNNING] `rustc [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build -v") - .with_stderr( - "\ -[FRESH] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn fresh_builds_possible_with_multiple_metadata_overrides() { - // The bug is non-deterministic. Sometimes you can get a fresh build - let target = rustc_host(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - &format!( - " - [target.{}.foo] - a = \"\" - b = \"\" - c = \"\" - d = \"\" - e = \"\" - ", - target - ), - ) - .file("build.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..] 
-[RUNNING] `rustc [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build -v") - .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint=info") - .with_stderr( - "\ -[FRESH] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn generate_good_d_files() { - // this is here to stop regression on an issue where build.rs rerun-if-changed paths aren't - // made absolute properly, which in turn interacts poorly with the dep-info-basedir setting, - // and the dep-info files have other-crate-relative paths spat out in them - let p = project() - .file( - "awoo/Cargo.toml", - r#" - [project] - name = "awoo" - version = "0.5.0" - build = "build.rs" - "#, - ) - .file("awoo/src/lib.rs", "") - .file( - "awoo/build.rs", - r#" - fn main() { - println!("cargo:rerun-if-changed=build.rs"); - println!("cargo:rerun-if-changed=barkbarkbark"); - } - "#, - ) - .file( - "Cargo.toml", - r#" - [project] - name = "meow" - version = "0.5.0" - [dependencies] - awoo = { path = "awoo" } - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v").run(); - - let dot_d_path = p.bin("meow").with_extension("d"); - println!("*meow at* {:?}", dot_d_path); - let dot_d = fs::read_to_string(&dot_d_path).unwrap(); - - println!("*.d file content*: {}", &dot_d); - - assert_match_exact( - "[..]/target/debug/meow[EXE]: [..]/awoo/barkbarkbark [..]/awoo/build.rs[..]", - &dot_d, - ); - - // paths relative to dependency roots should not be allowed - assert!(!dot_d - .split_whitespace() - .any(|v| v == "barkbarkbark" || v == "build.rs")); - - p.change_file( - ".cargo/config.toml", - r#" - [build] - dep-info-basedir="." - "#, - ); - p.cargo("build -v").run(); - - let dot_d = fs::read_to_string(&dot_d_path).unwrap(); - - println!("*.d file content with dep-info-basedir*: {}", &dot_d); - - assert_match_exact( - "target/debug/meow[EXE]: awoo/barkbarkbark awoo/build.rs[..]", - &dot_d, - ); - - // paths relative to dependency roots should not be allowed - assert!(!dot_d - .split_whitespace() - .any(|v| v == "barkbarkbark" || v == "build.rs")); -} - -#[cargo_test] -fn rebuild_only_on_explicit_paths() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rerun-if-changed=foo"); - println!("cargo:rerun-if-changed=bar"); - } - "#, - ) - .build(); - - p.cargo("build -v").run(); - - // files don't exist, so should always rerun if they don't exist - println!("run without"); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..]) -[RUNNING] `[..]/build-script-build` -[RUNNING] `rustc [..] src/lib.rs [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - sleep_ms(1000); - p.change_file("foo", ""); - p.change_file("bar", ""); - sleep_ms(1000); // make sure the to-be-created outfile has a timestamp distinct from the infiles - - // now the exist, so run once, catch the mtime, then shouldn't run again - println!("run with"); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..]) -[RUNNING] `[..]/build-script-build` -[RUNNING] `rustc [..] src/lib.rs [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - - println!("run with2"); - p.cargo("build -v") - .with_stderr( - "\ -[FRESH] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - sleep_ms(1000); - - // random other files do not affect freshness - println!("run baz"); - p.change_file("baz", ""); - p.cargo("build -v") - .with_stderr( - "\ -[FRESH] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - // but changing dependent files does - println!("run foo change"); - p.change_file("foo", ""); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..]) -[RUNNING] `[..]/build-script-build` -[RUNNING] `rustc [..] src/lib.rs [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - // .. as does deleting a file - println!("run foo delete"); - fs::remove_file(p.root().join("bar")).unwrap(); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..]) -[RUNNING] `[..]/build-script-build` -[RUNNING] `rustc [..] src/lib.rs [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn doctest_receives_build_link_args() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "bar" - build = "build.rs" - "#, - ) - .file("a/src/lib.rs", "") - .file( - "a/build.rs", - r#" - fn main() { - println!("cargo:rustc-link-search=native=bar"); - } - "#, - ) - .build(); - - p.cargo("test -v") - .with_stderr_contains( - "[RUNNING] `rustdoc [..]--crate-name foo --test [..]-L native=bar[..]`", - ) - .run(); -} - -#[cargo_test] -fn please_respect_the_dag() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [dependencies] - a = { path = 'a' } - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-link-search=native=foo"); - } - "#, - ) - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "bar" - build = "build.rs" - "#, - ) - .file("a/src/lib.rs", "") - .file( - "a/build.rs", - r#" - fn main() { - println!("cargo:rustc-link-search=native=bar"); - } - "#, - ) - .build(); - - p.cargo("build -v") - .with_stderr_contains("[RUNNING] `rustc [..] 
-L native=foo -L native=bar[..]`") - .run(); -} - -#[cargo_test] -fn non_utf8_output() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - r#" - use std::io::prelude::*; - - fn main() { - let mut out = std::io::stdout(); - // print something that's not utf8 - out.write_all(b"\xff\xff\n").unwrap(); - - // now print some cargo metadata that's utf8 - println!("cargo:rustc-cfg=foo"); - - // now print more non-utf8 - out.write_all(b"\xff\xff\n").unwrap(); - } - "#, - ) - .file("src/main.rs", "#[cfg(foo)] fn main() {}") - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn custom_target_dir() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = { path = "a" } - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - target-dir = 'test' - "#, - ) - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("a/build.rs", "fn main() {}") - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn panic_abort_with_build_scripts() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [profile.release] - panic = 'abort' - - [dependencies] - a = { path = "a" } - "#, - ) - .file( - "src/lib.rs", - "#[allow(unused_extern_crates)] extern crate a;", - ) - .file("build.rs", "fn main() {}") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - build = "build.rs" - - [build-dependencies] - b = { path = "../b" } - "#, - ) - .file("a/src/lib.rs", "") - .file( - "a/build.rs", - "#[allow(unused_extern_crates)] extern crate b; fn main() {}", - ) - .file( - "b/Cargo.toml", - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - "#, - ) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("build -v --release").run(); - - p.root().join("target").rm_rf(); - - p.cargo("test --release -v") - .with_stderr_does_not_contain("[..]panic[..]") - .run(); -} - -#[cargo_test] -fn warnings_emitted() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:warning=foo"); - println!("cargo:warning=bar"); - } - "#, - ) - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..]) -[RUNNING] `rustc [..]` -[RUNNING] `[..]` -warning: foo -warning: bar -[RUNNING] `rustc [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn warnings_emitted_when_build_script_panics() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:warning=foo"); - println!("cargo:warning=bar"); - panic!(); - } - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stdout("") - .with_stderr_contains("warning: foo\nwarning: bar") - .run(); -} - -#[cargo_test] -fn warnings_hidden_for_upstream() { - Package::new("bar", "0.1.0") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:warning=foo"); - println!("cargo:warning=bar"); - } - "#, - ) - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 ([..]) -[COMPILING] bar v0.1.0 -[RUNNING] `rustc [..]` -[RUNNING] `[..]` -[RUNNING] `rustc [..]` -[COMPILING] foo v0.5.0 ([..]) -[RUNNING] `rustc [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn warnings_printed_on_vv() { - Package::new("bar", "0.1.0") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:warning=foo"); - println!("cargo:warning=bar"); - } - "#, - ) - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -vv") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 ([..]) -[COMPILING] bar v0.1.0 -[RUNNING] `[..] rustc [..]` -[RUNNING] `[..]` -warning: foo -warning: bar -[RUNNING] `[..] rustc [..]` -[COMPILING] foo v0.5.0 ([..]) -[RUNNING] `[..] rustc [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn output_shows_on_vv() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::io::prelude::*; - - fn main() { - std::io::stderr().write_all(b"stderr\n").unwrap(); - std::io::stdout().write_all(b"stdout\n").unwrap(); - } - "#, - ) - .build(); - - p.cargo("build -vv") - .with_stdout("[foo 0.5.0] stdout") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..]) -[RUNNING] `[..] rustc [..]` -[RUNNING] `[..]` -[foo 0.5.0] stderr -[RUNNING] `[..] rustc [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn links_with_dots() { - let target = rustc_host(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - links = "a.b" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-link-search=bar") - } - "#, - ) - .file( - ".cargo/config", - &format!( - r#" - [target.{}.'a.b'] - rustc-link-search = ["foo"] - "#, - target - ), - ) - .build(); - - p.cargo("build -v") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..] [..] -L foo[..]`") - .run(); -} - -#[cargo_test] -fn rustc_and_rustdoc_set_correctly() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - - fn main() { - assert_eq!(env::var("RUSTC").unwrap(), "rustc"); - assert_eq!(env::var("RUSTDOC").unwrap(), "rustdoc"); - } - "#, - ) - .build(); - p.cargo("bench").run(); -} - -#[cargo_test] -fn cfg_env_vars_available() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - - fn main() { - let fam = env::var("CARGO_CFG_TARGET_FAMILY").unwrap(); - if cfg!(unix) { - assert_eq!(fam, "unix"); - } else { - assert_eq!(fam, "windows"); - } - } - "#, - ) - .build(); - p.cargo("bench").run(); -} - -#[cargo_test] -fn switch_features_rerun() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [features] - foo = [] - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - println!(include_str!(concat!(env!("OUT_DIR"), "/output"))); - } - "#, - ) - .file( - "build.rs", - r#" - use std::env; - use std::fs; - use std::path::Path; - - fn main() { - let out_dir = env::var_os("OUT_DIR").unwrap(); - let output = Path::new(&out_dir).join("output"); - - if env::var_os("CARGO_FEATURE_FOO").is_some() { - fs::write(output, "foo").unwrap(); - } else { - fs::write(output, "bar").unwrap(); - } - } - "#, - ) - .build(); - - p.cargo("build -v --features=foo").run(); - p.rename_run("foo", "with_foo").with_stdout("foo\n").run(); - p.cargo("build -v").run(); - p.rename_run("foo", "without_foo") - .with_stdout("bar\n") - .run(); - p.cargo("build -v --features=foo").run(); - p.rename_run("foo", "with_foo2").with_stdout("foo\n").run(); -} - -#[cargo_test] -fn assume_build_script_when_build_rs_present() { - let p = project() - .file( - "src/main.rs", - r#" - fn main() { - if ! 
cfg!(foo) { - panic!("the build script was not run"); - } - } - "#, - ) - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-cfg=foo"); - } - "#, - ) - .build(); - - p.cargo("run -v").run(); -} - -#[cargo_test] -fn if_build_set_to_false_dont_treat_build_rs_as_build_script() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = false - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - if cfg!(foo) { - panic!("the build script was run"); - } - } - "#, - ) - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-cfg=foo"); - } - "#, - ) - .build(); - - p.cargo("run -v").run(); -} - -#[cargo_test] -fn deterministic_rustc_dependency_flags() { - // This bug is non-deterministic hence the large number of dependencies - // in the hopes it will have a much higher chance of triggering it. - - Package::new("dep1", "0.1.0") - .file( - "Cargo.toml", - r#" - [project] - name = "dep1" - version = "0.1.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-flags=-L native=test1"); - } - "#, - ) - .file("src/lib.rs", "") - .publish(); - Package::new("dep2", "0.1.0") - .file( - "Cargo.toml", - r#" - [project] - name = "dep2" - version = "0.1.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-flags=-L native=test2"); - } - "#, - ) - .file("src/lib.rs", "") - .publish(); - Package::new("dep3", "0.1.0") - .file( - "Cargo.toml", - r#" - [project] - name = "dep3" - version = "0.1.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-flags=-L native=test3"); - } - "#, - ) - .file("src/lib.rs", "") - .publish(); - Package::new("dep4", "0.1.0") - .file( - "Cargo.toml", - r#" - [project] - name = "dep4" - version = "0.1.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-flags=-L native=test4"); - } - "#, - ) - .file("src/lib.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - dep1 = "*" - dep2 = "*" - dep3 = "*" - dep4 = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v") - .with_stderr_contains( - "\ -[RUNNING] `rustc --crate-name foo [..] -L native=test1 -L native=test2 \ --L native=test3 -L native=test4` -", - ) - .run(); -} - -#[cargo_test] -fn links_duplicates_with_cycle() { - // this tests that the links_duplicates are caught at resolver time - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - links = "a" - build = "build.rs" - - [dependencies.a] - path = "a" - - [dev-dependencies] - b = { path = "b" } - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - links = "a" - build = "build.rs" - "#, - ) - .file("a/src/lib.rs", "") - .file("a/build.rs", "") - .file( - "b/Cargo.toml", - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - - [dependencies] - foo = { path = ".." } - "#, - ) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("build").with_status(101) - .with_stderr("\ -error: failed to select a version for `a`. - ... 
required by package `foo v0.5.0 ([..])` -versions that meet the requirements `*` are: 0.5.0 - -the package `a` links to the native library `a`, but it conflicts with a previous package which links to `a` as well: -package `foo v0.5.0 ([..])` -Only one package in the dependency graph may specify the same links value. This helps ensure that only one copy of a native library is linked in the final binary. Try to adjust your dependencies so that only one package uses the links ='a' value. For more information, see https://doc.rust-lang.org/cargo/reference/resolver.html#links. - -failed to select a version for `a` which could resolve this conflict -").run(); -} - -#[cargo_test] -fn rename_with_link_search_path() { - _rename_with_link_search_path(false); -} - -#[cargo_test] -// Don't have a cdylib cross target on macos. -#[cfg_attr(target_os = "macos", ignore)] -fn rename_with_link_search_path_cross() { - if cross_compile::disabled() { - return; - } - - _rename_with_link_search_path(true); -} - -fn _rename_with_link_search_path(cross: bool) { - let target_arg = if cross { - format!(" --target={}", cross_compile::alternate()) - } else { - "".to_string() - }; - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [lib] - crate-type = ["cdylib"] - "#, - ) - .file( - "src/lib.rs", - "#[no_mangle] pub extern fn cargo_test_foo() {}", - ); - let p = p.build(); - - p.cargo(&format!("build{}", target_arg)).run(); - - let p2 = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file( - "build.rs", - r#" - use std::env; - use std::fs; - use std::path::PathBuf; - - fn main() { - // Move the `libfoo.so` from the root of our project into the - // build directory. This way Cargo should automatically manage - // `LD_LIBRARY_PATH` and such. - let root = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); - let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); - let src = root.join(&file); - - let dst_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap()); - let dst = dst_dir.join(&file); - - fs::copy(&src, &dst).unwrap(); - // handle windows, like below - drop(fs::copy(root.join("foo.dll.lib"), dst_dir.join("foo.dll.lib"))); - - println!("cargo:rerun-if-changed=build.rs"); - if cfg!(target_env = "msvc") { - println!("cargo:rustc-link-lib=foo.dll"); - } else { - println!("cargo:rustc-link-lib=foo"); - } - println!("cargo:rustc-link-search=all={}", - dst.parent().unwrap().display()); - } - "#, - ) - .file( - "src/main.rs", - r#" - extern { - #[link_name = "cargo_test_foo"] - fn foo(); - } - - fn main() { - unsafe { foo(); } - } - "#, - ); - let p2 = p2.build(); - - // Move the output `libfoo.so` into the directory of `p2`, and then delete - // the `p` project. On macOS, the `libfoo.dylib` artifact references the - // original path in `p` so we want to make sure that it can't find it (hence - // the deletion). 
- let root = if cross { - p.root() - .join("target") - .join(cross_compile::alternate()) - .join("debug") - .join("deps") - } else { - p.root().join("target").join("debug").join("deps") - }; - let file = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX); - let src = root.join(&file); - - let dst = p2.root().join(&file); - - fs::copy(&src, &dst).unwrap(); - // copy the import library for windows, if it exists - drop(fs::copy( - &root.join("foo.dll.lib"), - p2.root().join("foo.dll.lib"), - )); - remove_dir_all(p.root()).unwrap(); - - // Everything should work the first time - p2.cargo(&format!("run{}", target_arg)).run(); - - // Now rename the root directory and rerun `cargo run`. Not only should we - // not build anything but we also shouldn't crash. - let mut new = p2.root(); - new.pop(); - new.push("bar2"); - - // For whatever reason on Windows right after we execute a binary it's very - // unlikely that we're able to successfully delete or rename that binary. - // It's not really clear why this is the case or if it's a bug in Cargo - // holding a handle open too long. In an effort to reduce the flakiness of - // this test though we throw this in a loop - // - // For some more information see #5481 and rust-lang/rust#48775 - let mut i = 0; - loop { - let error = match fs::rename(p2.root(), &new) { - Ok(()) => break, - Err(e) => e, - }; - i += 1; - if !cfg!(windows) || error.kind() != io::ErrorKind::PermissionDenied || i > 10 { - panic!("failed to rename: {}", error); - } - println!("assuming {} is spurious, waiting to try again", error); - thread::sleep(slow_cpu_multiplier(100)); - } - - p2.cargo(&format!("run{}", target_arg)) - .cwd(&new) - .with_stderr( - "\ -[FINISHED] [..] -[RUNNING] [..] -", - ) - .run(); -} - -#[cargo_test] -fn optional_build_script_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - bar = { path = "bar", optional = true } - - [build-dependencies] - bar = { path = "bar", optional = true } - "#, - ) - .file( - "build.rs", - r#" - #[cfg(feature = "bar")] - extern crate bar; - - fn main() { - #[cfg(feature = "bar")] { - println!("cargo:rustc-env=FOO={}", bar::bar()); - return - } - println!("cargo:rustc-env=FOO=0"); - } - "#, - ) - .file( - "src/main.rs", - r#" - #[cfg(feature = "bar")] - extern crate bar; - - fn main() { - println!("{}", env!("FOO")); - } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("bar/src/lib.rs", "pub fn bar() -> u32 { 1 }"); - let p = p.build(); - - p.cargo("run").with_stdout("0\n").run(); - p.cargo("run --features bar").with_stdout("1\n").run(); -} - -#[cargo_test] -fn optional_build_dep_and_required_normal_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "./bar", optional = true } - - [build-dependencies] - bar = { path = "./bar" } - "#, - ) - .file("build.rs", "extern crate bar; fn main() { bar::bar(); }") - .file( - "src/main.rs", - r#" - #[cfg(feature = "bar")] - extern crate bar; - - fn main() { - #[cfg(feature = "bar")] { - println!("{}", bar::bar()); - } - #[cfg(not(feature = "bar"))] { - println!("0"); - } - } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("bar/src/lib.rs", "pub fn bar() -> u32 { 1 }"); - let p = p.build(); - - p.cargo("run") - .with_stdout("0") - .with_stderr( - "\ -[COMPILING] bar v0.5.0 ([..]) -[COMPILING] foo v0.1.0 ([..]) -[FINISHED] dev 
[unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]foo[EXE]`", - ) - .run(); - - p.cargo("run --all-features") - .with_stdout("1") - .with_stderr( - "\ -[COMPILING] foo v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]foo[EXE]`", - ) - .run(); -} - -#[cargo_test] -fn using_rerun_if_changed_does_not_rebuild() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - "#, - ) - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rerun-if-changed=build.rs"); - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn links_interrupted_can_restart() { - // Test for a `links` dependent build script getting canceled and then - // restarted. Steps: - // 1. Build to establish fingerprints. - // 2. Change something (an env var in this case) that triggers the - // dependent build script to run again. Kill the top-level build script - // while it is running (such as hitting Ctrl-C). - // 3. Run the build again, it should re-run the build script. - let bar = project() - .at("bar") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.5.0" - authors = [] - links = "foo" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rerun-if-env-changed=SOMEVAR"); - } - "#, - ) - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - - [dependencies.bar] - path = '{}' - "#, - bar.root().display() - ), - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - fn main() { - println!("cargo:rebuild-if-changed=build.rs"); - if std::path::Path::new("abort").exists() { - panic!("Crash!"); - } - } - "#, - ) - .build(); - - p.cargo("build").run(); - // Simulate the user hitting Ctrl-C during a build. - p.change_file("abort", ""); - // Set SOMEVAR to trigger a rebuild. - p.cargo("build") - .env("SOMEVAR", "1") - .with_stderr_contains("[..]Crash![..]") - .with_status(101) - .run(); - fs::remove_file(p.root().join("abort")).unwrap(); - // Try again without aborting the script. - // ***This is currently broken, the script does not re-run. - p.cargo("build -v") - .env("SOMEVAR", "1") - .with_stderr_contains("[RUNNING] [..]/foo-[..]/build-script-build[..]") - .run(); -} - -#[cargo_test] -fn dev_dep_with_links() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - links = "x" - - [dev-dependencies] - bar = { path = "./bar" } - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - links = "y" - - [dependencies] - foo = { path = ".." } - "#, - ) - .file("bar/build.rs", "fn main() {}") - .file("bar/src/lib.rs", "") - .build(); - p.cargo("check --tests").run() -} - -#[cargo_test] -fn rerun_if_directory() { - if !symlink_supported() { - return; - } - - // rerun-if-changed of a directory should rerun if any file in the directory changes. 
- let p = project() - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rerun-if-changed=somedir"); - } - "#, - ) - .build(); - - let dirty = || { - p.cargo("check") - .with_stderr( - "[COMPILING] foo [..]\n\ - [FINISHED] [..]", - ) - .run(); - }; - - let fresh = || { - p.cargo("check").with_stderr("[FINISHED] [..]").run(); - }; - - // Start with a missing directory. - dirty(); - // Because the directory doesn't exist, it will trigger a rebuild every time. - // https://github.com/rust-lang/cargo/issues/6003 - dirty(); - - if is_coarse_mtime() { - sleep_ms(1000); - } - - // Empty directory. - fs::create_dir(p.root().join("somedir")).unwrap(); - dirty(); - fresh(); - - if is_coarse_mtime() { - sleep_ms(1000); - } - - // Add a file. - p.change_file("somedir/foo", ""); - p.change_file("somedir/bar", ""); - dirty(); - fresh(); - - if is_coarse_mtime() { - sleep_ms(1000); - } - - // Add a symlink. - p.symlink("foo", "somedir/link"); - dirty(); - fresh(); - - if is_coarse_mtime() { - sleep_ms(1000); - } - - // Move the symlink. - fs::remove_file(p.root().join("somedir/link")).unwrap(); - p.symlink("bar", "somedir/link"); - dirty(); - fresh(); - - if is_coarse_mtime() { - sleep_ms(1000); - } - - // Remove a file. - fs::remove_file(p.root().join("somedir/foo")).unwrap(); - dirty(); - fresh(); -} - -#[cargo_test] -fn test_with_dep_metadata() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = 'bar' } - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - assert_eq!(std::env::var("DEP_BAR_FOO").unwrap(), "bar"); - } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - links = 'bar' - "#, - ) - .file("bar/src/lib.rs", "") - .file( - "bar/build.rs", - r#" - fn main() { - println!("cargo:foo=bar"); - } - "#, - ) - .build(); - p.cargo("test --lib").run(); -} - -#[cargo_test] -fn duplicate_script_with_extra_env() { - // Test where a build script is run twice, that emits different rustc-env - // and rustc-cfg values. In this case, one is run for host, the other for - // target. - if !cross_compile::can_run_on_host() { - return; - } - - let target = cross_compile::alternate(); - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "pm"] - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - pm = { path = "../pm" } - "#, - ) - .file( - "foo/src/lib.rs", - &r#" - //! ```rust - //! #[cfg(not(mycfg="{target}"))] - //! compile_error!{"expected mycfg set"} - //! assert_eq!(env!("CRATE_TARGET"), "{target}"); - //! assert_eq!(std::env::var("CRATE_TARGET").unwrap(), "{target}"); - //! ``` - - #[test] - fn check_target() { - #[cfg(not(mycfg="{target}"))] - compile_error!{"expected mycfg set"} - // Compile-time assertion. - assert_eq!(env!("CRATE_TARGET"), "{target}"); - // Run-time assertion. - assert_eq!(std::env::var("CRATE_TARGET").unwrap(), "{target}"); - } - "# - .replace("{target}", target), - ) - .file( - "foo/build.rs", - r#" - fn main() { - println!("cargo:rustc-env=CRATE_TARGET={}", std::env::var("TARGET").unwrap()); - println!("cargo:rustc-cfg=mycfg=\"{}\"", std::env::var("TARGET").unwrap()); - } - "#, - ) - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - - [lib] - proc-macro = true - # This is just here to speed things up. 
- doctest = false - - [dev-dependencies] - foo = { path = "../foo" } - "#, - ) - .file("pm/src/lib.rs", "") - .build(); - - p.cargo("test --workspace --target") - .arg(&target) - .with_stdout_contains("test check_target ... ok") - .run(); - - if cargo_test_support::is_nightly() { - p.cargo("test --workspace -Z doctest-xcompile --doc --target") - .arg(&target) - .masquerade_as_nightly_cargo() - .with_stdout_contains("test src/lib.rs - (line 2) ... ok") - .run(); - } -} - -#[cargo_test] -fn wrong_output() { - let p = project() - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:example"); - } - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[COMPILING] foo [..] -error: invalid output in build script of `foo v0.0.1 ([ROOT]/foo)`: `cargo:example` -Expected a line with `cargo:key=value` with an `=` character, but none was found. -See https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script \ -for more information about build script outputs. -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_script_env.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_script_env.rs deleted file mode 100644 index 17c729bc9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_script_env.rs +++ /dev/null @@ -1,175 +0,0 @@ -//! Tests for build.rs rerun-if-env-changed and rustc-env - -use cargo_test_support::basic_manifest; -use cargo_test_support::project; -use cargo_test_support::sleep_ms; - -#[cargo_test] -fn rerun_if_env_changes() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rerun-if-env-changed=FOO"); - } - "#, - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] -", - ) - .run(); - p.cargo("build") - .env("FOO", "bar") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] -", - ) - .run(); - p.cargo("build") - .env("FOO", "baz") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] -", - ) - .run(); - p.cargo("build") - .env("FOO", "baz") - .with_stderr("[FINISHED] [..]") - .run(); - p.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn rerun_if_env_or_file_changes() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rerun-if-env-changed=FOO"); - println!("cargo:rerun-if-changed=foo"); - } - "#, - ) - .file("foo", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] -", - ) - .run(); - p.cargo("build") - .env("FOO", "bar") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] -", - ) - .run(); - p.cargo("build") - .env("FOO", "bar") - .with_stderr("[FINISHED] [..]") - .run(); - sleep_ms(1000); - p.change_file("foo", ""); - p.cargo("build") - .env("FOO", "bar") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn rustc_bootstrap() { - let build_rs = r#" - fn main() { - println!("cargo:rustc-env=RUSTC_BOOTSTRAP=1"); - } - "#; - let p = project() - .file("Cargo.toml", &basic_manifest("has-dashes", "0.0.1")) - .file("src/lib.rs", "#![feature(rustc_attrs)]") - .file("build.rs", build_rs) - .build(); - // RUSTC_BOOTSTRAP unset on stable should error - p.cargo("build") - .with_stderr_contains("error: Cannot set `RUSTC_BOOTSTRAP=1` [..]") - .with_stderr_contains( - "help: [..] set the environment variable `RUSTC_BOOTSTRAP=has_dashes` [..]", - ) - .with_status(101) - .run(); - // nightly should warn whether or not RUSTC_BOOTSTRAP is set - p.cargo("build") - .masquerade_as_nightly_cargo() - // NOTE: uses RUSTC_BOOTSTRAP so it will be propagated to rustc - // (this matters when tests are being run with a beta or stable cargo) - .env("RUSTC_BOOTSTRAP", "1") - .with_stderr_contains("warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]") - .run(); - // RUSTC_BOOTSTRAP set to the name of the library should warn - p.cargo("build") - .env("RUSTC_BOOTSTRAP", "has_dashes") - .with_stderr_contains("warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]") - .run(); - // RUSTC_BOOTSTRAP set to some random value should error - p.cargo("build") - .env("RUSTC_BOOTSTRAP", "bar") - .with_stderr_contains("error: Cannot set `RUSTC_BOOTSTRAP=1` [..]") - .with_stderr_contains( - "help: [..] set the environment variable `RUSTC_BOOTSTRAP=has_dashes` [..]", - ) - .with_status(101) - .run(); - - // Tests for binaries instead of libraries - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "0.0.1")) - .file("src/main.rs", "#![feature(rustc_attrs)] fn main() {}") - .file("build.rs", build_rs) - .build(); - // nightly should warn when there's no library whether or not RUSTC_BOOTSTRAP is set - p.cargo("build") - .masquerade_as_nightly_cargo() - // NOTE: uses RUSTC_BOOTSTRAP so it will be propagated to rustc - // (this matters when tests are being run with a beta or stable cargo) - .env("RUSTC_BOOTSTRAP", "1") - .with_stderr_contains("warning: Cannot set `RUSTC_BOOTSTRAP=1` [..]") - .run(); - // RUSTC_BOOTSTRAP conditionally set when there's no library should error (regardless of the value) - p.cargo("build") - .env("RUSTC_BOOTSTRAP", "foo") - .with_stderr_contains("error: Cannot set `RUSTC_BOOTSTRAP=1` [..]") - .with_stderr_contains("help: [..] set the environment variable `RUSTC_BOOTSTRAP=1` [..]") - .with_status(101) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_script_extra_link_arg.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_script_extra_link_arg.rs deleted file mode 100644 index 116b094d7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/build_script_extra_link_arg.rs +++ /dev/null @@ -1,312 +0,0 @@ -//! Tests for additional link arguments. - -// NOTE: Many of these tests use `without_status()` when passing bogus flags -// because MSVC link.exe just gives a warning on unknown flags (how helpful!), -// and other linkers will return an error. 
- -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_bin_manifest, basic_manifest, project}; - -#[cargo_test] -fn build_script_extra_link_arg_bin() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-link-arg-bins=--this-is-a-bogus-flag"); - } - "#, - ) - .build(); - - p.cargo("build -v") - .masquerade_as_nightly_cargo() - .without_status() - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo [..]-C link-arg=--this-is-a-bogus-flag[..]", - ) - .run(); -} - -#[cargo_test] -fn build_script_extra_link_arg_bin_single() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - - name = "foobar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [[bin]] - name = "foo" - [[bin]] - name = "bar" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-link-arg-bins=--bogus-flag-all"); - println!("cargo:rustc-link-arg-bin=foo=--bogus-flag-foo"); - println!("cargo:rustc-link-arg-bin=bar=--bogus-flag-bar"); - } - "#, - ) - .build(); - - p.cargo("build -v") - .masquerade_as_nightly_cargo() - .without_status() - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo [..]-C link-arg=--bogus-flag-all -C link-arg=--bogus-flag-foo[..]", - ) - .with_stderr_contains( - "[RUNNING] `rustc --crate-name bar [..]-C link-arg=--bogus-flag-all -C link-arg=--bogus-flag-bar[..]", - ) - .run(); -} - -#[cargo_test] -fn build_script_extra_link_arg() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-link-arg=--this-is-a-bogus-flag"); - } - "#, - ) - .build(); - - p.cargo("build -v") - .masquerade_as_nightly_cargo() - .without_status() - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo [..]-C link-arg=--this-is-a-bogus-flag[..]", - ) - .run(); -} - -#[cargo_test] -fn link_arg_missing_target() { - // Errors when a given target doesn't exist. - let p = project() - .file("src/lib.rs", "") - .file( - "build.rs", - r#"fn main() { println!("cargo:rustc-link-arg-cdylib=--bogus"); }"#, - ) - .build(); - - // TODO: Uncomment this if cdylib restriction is re-added (see - // cdylib_link_arg_transitive below). - // p.cargo("check") - // .with_status(101) - // .with_stderr("\ - // [COMPILING] foo [..] - // error: invalid instruction `cargo:rustc-link-arg-cdylib` from build script of `foo v0.0.1 ([ROOT]/foo)` - // The package foo v0.0.1 ([ROOT]/foo) does not have a cdylib target. - // ") - // .run(); - - p.change_file( - "build.rs", - r#"fn main() { println!("cargo:rustc-link-arg-bins=--bogus"); }"#, - ); - - p.cargo("check") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr("\ -[COMPILING] foo [..] -error: invalid instruction `cargo:rustc-link-arg-bins` from build script of `foo v0.0.1 ([ROOT]/foo)` -The package foo v0.0.1 ([ROOT]/foo) does not have a bin target. -") - .run(); - - p.change_file( - "build.rs", - r#"fn main() { println!("cargo:rustc-link-arg-bin=abc=--bogus"); }"#, - ); - - p.cargo("check") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[COMPILING] foo [..] -error: invalid instruction `cargo:rustc-link-arg-bin` from build script of `foo v0.0.1 ([ROOT]/foo)` -The package foo v0.0.1 ([ROOT]/foo) does not have a bin target with the name `abc`. 
-", - ) - .run(); - - p.change_file( - "build.rs", - r#"fn main() { println!("cargo:rustc-link-arg-bin=abc"); }"#, - ); - - p.cargo("check") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[COMPILING] foo [..] -error: invalid instruction `cargo:rustc-link-arg-bin=abc` from build script of `foo v0.0.1 ([ROOT]/foo)` -The instruction should have the form cargo:rustc-link-arg-bin=BIN=ARG -", - ) - .run(); -} - -#[cargo_test] -fn cdylib_link_arg_transitive() { - // There was an unintended regression in 1.50 where rustc-link-arg-cdylib - // arguments from dependencies were being applied in the parent package. - // Previously it was silently ignored. - // See https://github.com/rust-lang/cargo/issues/9562 - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [lib] - crate-type = ["cdylib"] - - [dependencies] - bar = {path="bar"} - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) - .file("bar/src/lib.rs", "") - .file( - "bar/build.rs", - r#" - fn main() { - println!("cargo:rustc-link-arg-cdylib=--bogus"); - } - "#, - ) - .build(); - p.cargo("build -v") - .without_status() - .with_stderr_contains( - "\ -[COMPILING] bar v1.0.0 [..] -[RUNNING] `rustc --crate-name build_script_build bar/build.rs [..] -[RUNNING] `[..]build-script-build[..] -warning: cargo:rustc-link-arg-cdylib was specified in the build script of bar v1.0.0 \ -([ROOT]/foo/bar), but that package does not contain a cdylib target - -Allowing this was an unintended change in the 1.50 release, and may become an error in \ -the future. For more information, see . -[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..] -[COMPILING] foo v0.1.0 [..] -[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C link-arg=--bogus[..]` -", - ) - .run(); -} - -#[cargo_test] -fn link_arg_transitive_not_allowed() { - // Verify that transitive dependencies don't pass link args. - // - // Note that rustc-link-arg doesn't have any errors or warnings when it is - // unused. Perhaps that could be more aggressive, but it is difficult - // since it could be used for test binaries. - Package::new("bar", "1.0.0") - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-link-arg=--bogus"); - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [lib] - crate-type = ["cdylib"] - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] [..] -[DOWNLOADED] [..] -[COMPILING] bar v1.0.0 -[RUNNING] `rustc --crate-name build_script_build [..] -[RUNNING] `[..]/build-script-build[..] -[RUNNING] `rustc --crate-name bar [..] -[COMPILING] foo v0.1.0 [..] -[RUNNING] `rustc --crate-name foo src/lib.rs [..] -[FINISHED] dev [..] -", - ) - .with_stderr_does_not_contain("--bogus") - .run(); -} - -#[cargo_test] -fn link_arg_with_doctest() { - let p = project() - .file( - "src/lib.rs", - r#" - //! ``` - //! let x = 5; - //! assert_eq!(x, 5); - //! 
``` - "#, - ) - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-link-arg=--this-is-a-bogus-flag"); - } - "#, - ) - .build(); - - p.cargo("test --doc -v") - .masquerade_as_nightly_cargo() - .without_status() - .with_stderr_contains( - "[RUNNING] `rustdoc [..]--crate-name foo [..]-C link-arg=--this-is-a-bogus-flag[..]", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cache_messages.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cache_messages.rs deleted file mode 100644 index 526abe52b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cache_messages.rs +++ /dev/null @@ -1,488 +0,0 @@ -//! Tests for caching compiler diagnostics. - -use super::messages::raw_rustc_output; -use cargo_test_support::tools; -use cargo_test_support::{basic_manifest, is_coarse_mtime, project, registry::Package, sleep_ms}; - -fn as_str(bytes: &[u8]) -> &str { - std::str::from_utf8(bytes).expect("valid utf-8") -} - -#[cargo_test] -fn simple() { - // A simple example that generates two warnings (unused functions). - let p = project() - .file( - "src/lib.rs", - " - fn a() {} - fn b() {} - ", - ) - .build(); - - // Capture what rustc actually emits. This is done to avoid relying on the - // exact message formatting in rustc. - let rustc_output = raw_rustc_output(&p, "src/lib.rs", &[]); - - // -q so the output is the same as rustc (no "Compiling" or "Finished"). - let cargo_output1 = p - .cargo("check -q --color=never") - .exec_with_output() - .expect("cargo to run"); - assert_eq!(rustc_output, as_str(&cargo_output1.stderr)); - assert!(cargo_output1.stdout.is_empty()); - // Check that the cached version is exactly the same. - let cargo_output2 = p - .cargo("check -q") - .exec_with_output() - .expect("cargo to run"); - assert_eq!(rustc_output, as_str(&cargo_output2.stderr)); - assert!(cargo_output2.stdout.is_empty()); -} - -// same as `simple`, except everything is using the short format -#[cargo_test] -fn simple_short() { - let p = project() - .file( - "src/lib.rs", - " - fn a() {} - fn b() {} - ", - ) - .build(); - - let rustc_output = raw_rustc_output(&p, "src/lib.rs", &["--error-format=short"]); - - let cargo_output1 = p - .cargo("check -q --color=never --message-format=short") - .exec_with_output() - .expect("cargo to run"); - assert_eq!(rustc_output, as_str(&cargo_output1.stderr)); - // assert!(cargo_output1.stdout.is_empty()); - let cargo_output2 = p - .cargo("check -q --message-format=short") - .exec_with_output() - .expect("cargo to run"); - println!("{}", String::from_utf8_lossy(&cargo_output2.stdout)); - assert_eq!(rustc_output, as_str(&cargo_output2.stderr)); - assert!(cargo_output2.stdout.is_empty()); -} - -#[cargo_test] -fn color() { - // Check enabling/disabling color. - let p = project().file("src/lib.rs", "fn a() {}").build(); - - // Hack for issue in fwdansi 1.1. It is squashing multiple resets - // into a single reset. - // https://github.com/kennytm/fwdansi/issues/2 - fn normalize(s: &str) -> String { - #[cfg(windows)] - return s.replace("\x1b[0m\x1b[0m", "\x1b[0m"); - #[cfg(not(windows))] - return s.to_string(); - } - - let compare = |a, b| { - assert_eq!(normalize(a), normalize(b)); - }; - - // Capture the original color output. - let rustc_color = raw_rustc_output(&p, "src/lib.rs", &["--color=always"]); - assert!(rustc_color.contains("\x1b[")); - - // Capture the original non-color output. 
- let rustc_nocolor = raw_rustc_output(&p, "src/lib.rs", &[]); - assert!(!rustc_nocolor.contains("\x1b[")); - - // First pass, non-cached, with color, should be the same. - let cargo_output1 = p - .cargo("check -q --color=always") - .exec_with_output() - .expect("cargo to run"); - compare(&rustc_color, as_str(&cargo_output1.stderr)); - - // Replay cached, with color. - let cargo_output2 = p - .cargo("check -q --color=always") - .exec_with_output() - .expect("cargo to run"); - compare(&rustc_color, as_str(&cargo_output2.stderr)); - - // Replay cached, no color. - let cargo_output_nocolor = p - .cargo("check -q --color=never") - .exec_with_output() - .expect("cargo to run"); - compare(&rustc_nocolor, as_str(&cargo_output_nocolor.stderr)); -} - -#[cargo_test] -fn cached_as_json() { - // Check that cached JSON output is the same. - let p = project().file("src/lib.rs", "fn a() {}").build(); - - // Grab the non-cached output, feature disabled. - // NOTE: When stabilizing, this will need to be redone. - let cargo_output = p - .cargo("check --message-format=json") - .exec_with_output() - .expect("cargo to run"); - assert!(cargo_output.status.success()); - let orig_cargo_out = as_str(&cargo_output.stdout); - assert!(orig_cargo_out.contains("compiler-message")); - p.cargo("clean").run(); - - // Check JSON output, not fresh. - let cargo_output1 = p - .cargo("check --message-format=json") - .exec_with_output() - .expect("cargo to run"); - assert_eq!(as_str(&cargo_output1.stdout), orig_cargo_out); - - // Check JSON output, fresh. - let cargo_output2 = p - .cargo("check --message-format=json") - .exec_with_output() - .expect("cargo to run"); - // The only difference should be this field. - let fix_fresh = as_str(&cargo_output2.stdout).replace("\"fresh\":true", "\"fresh\":false"); - assert_eq!(fix_fresh, orig_cargo_out); -} - -#[cargo_test] -fn clears_cache_after_fix() { - // Make sure the cache is invalidated when there is no output. - let p = project().file("src/lib.rs", "fn asdf() {}").build(); - // Fill the cache. - p.cargo("check").with_stderr_contains("[..]asdf[..]").run(); - let cpath = p - .glob("target/debug/.fingerprint/foo-*/output-*") - .next() - .unwrap() - .unwrap(); - assert!(std::fs::read_to_string(cpath).unwrap().contains("asdf")); - - // Fix it. - if is_coarse_mtime() { - sleep_ms(1000); - } - p.change_file("src/lib.rs", ""); - - p.cargo("check") - .with_stdout("") - .with_stderr( - "\ -[CHECKING] foo [..] -[FINISHED] [..] -", - ) - .run(); - assert_eq!( - p.glob("target/debug/.fingerprint/foo-*/output-*").count(), - 0 - ); - - // And again, check the cache is correct. - p.cargo("check") - .with_stdout("") - .with_stderr( - "\ -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustdoc() { - // Create a warning in rustdoc. - let p = project() - .file( - "src/lib.rs", - " - #![warn(missing_docs)] - pub fn f() {} - ", - ) - .build(); - - let rustdoc_output = p - .cargo("doc -q --color=always") - .exec_with_output() - .expect("rustdoc to run"); - assert!(rustdoc_output.status.success()); - let rustdoc_stderr = as_str(&rustdoc_output.stderr); - assert!(rustdoc_stderr.contains("missing")); - assert!(rustdoc_stderr.contains("\x1b[")); - assert_eq!( - p.glob("target/debug/.fingerprint/foo-*/output-*").count(), - 1 - ); - - // Check the cached output. 
- let rustdoc_output = p - .cargo("doc -q --color=always") - .exec_with_output() - .expect("rustdoc to run"); - assert_eq!(as_str(&rustdoc_output.stderr), rustdoc_stderr); -} - -#[cargo_test] -fn fix() { - // Make sure `fix` is not broken by caching. - let p = project().file("src/lib.rs", "pub fn try() {}").build(); - - p.cargo("fix --edition --allow-no-vcs").run(); - - assert_eq!(p.read_file("src/lib.rs"), "pub fn r#try() {}"); -} - -#[cargo_test] -fn very_verbose() { - // Handle cap-lints in dependencies. - Package::new("bar", "1.0.0") - .file("src/lib.rs", "fn not_used() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check -vv") - .with_stderr_contains("[..]not_used[..]") - .run(); - - p.cargo("check").with_stderr("[FINISHED] [..]").run(); - - p.cargo("check -vv") - .with_stderr_contains("[..]not_used[..]") - .run(); -} - -#[cargo_test] -fn doesnt_create_extra_files() { - // Ensure it doesn't create `output` files when not needed. - Package::new("dep", "1.0.0") - .file("src/lib.rs", "fn unused() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = "1.0" - "#, - ) - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); - - assert_eq!( - p.glob("target/debug/.fingerprint/foo-*/output-*").count(), - 0 - ); - assert_eq!( - p.glob("target/debug/.fingerprint/dep-*/output-*").count(), - 0 - ); - if is_coarse_mtime() { - sleep_ms(1000); - } - p.change_file("src/lib.rs", "fn unused() {}"); - p.cargo("build").run(); - assert_eq!( - p.glob("target/debug/.fingerprint/foo-*/output-*").count(), - 1 - ); -} - -#[cargo_test] -fn replay_non_json() { - // Handles non-json output. - let rustc = project() - .at("rustc") - .file("Cargo.toml", &basic_manifest("rustc_alt", "1.0.0")) - .file( - "src/main.rs", - r#" - fn main() { - eprintln!("line 1"); - eprintln!("line 2"); - let r = std::process::Command::new("rustc") - .args(std::env::args_os().skip(1)) - .status(); - std::process::exit(r.unwrap().code().unwrap_or(2)); - } - "#, - ) - .build(); - rustc.cargo("build").run(); - let p = project().file("src/lib.rs", "").build(); - p.cargo("check") - .env("RUSTC", rustc.bin("rustc_alt")) - .with_stderr( - "\ -[CHECKING] foo [..] -line 1 -line 2 -[FINISHED] dev [..] -", - ) - .run(); - - p.cargo("check") - .env("RUSTC", rustc.bin("rustc_alt")) - .with_stderr( - "\ -line 1 -line 2 -[FINISHED] dev [..] -", - ) - .run(); -} - -#[cargo_test] -fn caching_large_output() { - // Handles large number of messages. - // This is an arbitrary amount that is greater than the 100 used in - // job_queue. This is here to check for deadlocks or any other problems. 
- const COUNT: usize = 250; - let rustc = project() - .at("rustc") - .file("Cargo.toml", &basic_manifest("rustc_alt", "1.0.0")) - .file( - "src/main.rs", - &format!( - r#" - fn main() {{ - for i in 0..{} {{ - eprintln!("{{{{\"message\": \"test message {{}}\", \"level\": \"warning\", \ - \"spans\": [], \"children\": [], \"rendered\": \"test message {{}}\"}}}}", - i, i); - }} - let r = std::process::Command::new("rustc") - .args(std::env::args_os().skip(1)) - .status(); - std::process::exit(r.unwrap().code().unwrap_or(2)); - }} - "#, - COUNT - ), - ) - .build(); - - let mut expected = String::new(); - for i in 0..COUNT { - expected.push_str(&format!("test message {}\n", i)); - } - - rustc.cargo("build").run(); - let p = project().file("src/lib.rs", "").build(); - p.cargo("check") - .env("RUSTC", rustc.bin("rustc_alt")) - .with_stderr(&format!( - "\ -[CHECKING] foo [..] -{}warning: `foo` (lib) generated 250 warnings -[FINISHED] dev [..] -", - expected - )) - .run(); - - p.cargo("check") - .env("RUSTC", rustc.bin("rustc_alt")) - .with_stderr(&format!( - "\ -{}warning: `foo` (lib) generated 250 warnings -[FINISHED] dev [..] -", - expected - )) - .run(); -} - -#[cargo_test] -fn rustc_workspace_wrapper() { - let p = project() - .file( - "src/lib.rs", - "pub fn f() { assert!(true); }\n\ - fn unused_func() {}", - ) - .build(); - - p.cargo("check -v") - .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) - .with_stderr_contains("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]") - .run(); - - // Check without a wrapper should rebuild - p.cargo("check -v") - .with_stderr_contains( - "\ -[CHECKING] foo [..] -[RUNNING] `rustc[..] -[WARNING] [..]unused_func[..] -", - ) - .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]") - .run(); - - // Again, reading from the cache. - p.cargo("check -v") - .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) - .with_stderr_contains("[FRESH] foo [..]") - .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]") - .run(); - - // And `check` should also be fresh, reading from cache. - p.cargo("check -v") - .with_stderr_contains("[FRESH] foo [..]") - .with_stderr_contains("[WARNING] [..]unused_func[..]") - .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name foo src/lib.rs [..]") - .run(); -} - -#[cargo_test] -fn wacky_hashless_fingerprint() { - // On Windows, executables don't have hashes. This checks for a bad - // assumption that caused bad caching. - let p = project() - .file("src/bin/a.rs", "fn main() { let unused = 1; }") - .file("src/bin/b.rs", "fn main() {}") - .build(); - p.cargo("build --bin b") - .with_stderr_does_not_contain("[..]unused[..]") - .run(); - p.cargo("build --bin a") - .with_stderr_contains("[..]unused[..]") - .run(); - // This should not pick up the cache from `a`. - p.cargo("build --bin b") - .with_stderr_does_not_contain("[..]unused[..]") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_alias_config.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_alias_config.rs deleted file mode 100644 index 6777aeabf..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_alias_config.rs +++ /dev/null @@ -1,335 +0,0 @@ -//! Tests for `[alias]` config command aliases. 
- -use std::env; - -use cargo_test_support::tools::echo_subcommand; -use cargo_test_support::{basic_bin_manifest, project}; - -#[cargo_test] -fn alias_incorrect_config_type() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [alias] - b-cargo-test = 5 - "#, - ) - .build(); - - p.cargo("b-cargo-test -v") - .with_status(101) - .with_stderr_contains( - "\ -[ERROR] invalid configuration for key `alias.b-cargo-test` -expected a list, but found a integer for [..]", - ) - .run(); -} - -#[cargo_test] -fn alias_config() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [alias] - b-cargo-test = "build" - "#, - ) - .build(); - - p.cargo("b-cargo-test -v") - .with_stderr_contains( - "\ -[COMPILING] foo v0.5.0 [..] -[RUNNING] `rustc --crate-name foo [..]", - ) - .run(); -} - -#[cargo_test] -fn dependent_alias() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [alias] - b-cargo-test = "build" - a-cargo-test = ["b-cargo-test", "-v"] - "#, - ) - .build(); - - p.cargo("a-cargo-test") - .with_stderr_contains( - "\ -[COMPILING] foo v0.5.0 [..] -[RUNNING] `rustc --crate-name foo [..]", - ) - .run(); -} - -#[cargo_test] -fn alias_shadowing_external_subcommand() { - let echo = echo_subcommand(); - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [alias] - echo = "build" - "#, - ) - .build(); - - let mut paths: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect(); - paths.push(echo.target_debug_dir()); - let path = env::join_paths(paths).unwrap(); - - p.cargo("echo") - .env("PATH", &path) - .with_stderr("\ -[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]/cargo-echo/target/debug/cargo-echo[EXE]` -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #10049 . -[COMPILING] foo v0.5.0 [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn default_args_alias() { - let echo = echo_subcommand(); - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [alias] - echo = "echo --flag1 --flag2" - test-1 = "echo" - build = "build --verbose" - "#, - ) - .build(); - - let mut paths: Vec<_> = env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect(); - paths.push(echo.target_debug_dir()); - let path = env::join_paths(paths).unwrap(); - - p.cargo("echo") - .env("PATH", &path) - .with_status(101) - .with_stderr("\ -[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]/cargo-echo/target/debug/cargo-echo[EXE]` -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #10049 . 
-error: alias echo has unresolvable recursive definition: echo -> echo -", - ) - .run(); - - p.cargo("test-1") - .env("PATH", &path) - .with_status(101) - .with_stderr("\ -[WARNING] user-defined alias `echo` is shadowing an external subcommand found at: `[ROOT]/cargo-echo/target/debug/cargo-echo[EXE]` -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #10049 . -error: alias test-1 has unresolvable recursive definition: test-1 -> echo -> echo -", - ) - .run(); - - // Builtins are not expanded by rule - p.cargo("build") - .with_stderr( - "\ -[WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command -[COMPILING] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn corecursive_alias() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [alias] - test-1 = "test-2 --flag1" - test-2 = "test-3 --flag2" - test-3 = "test-1 --flag3" - "#, - ) - .build(); - - p.cargo("test-1") - .with_status(101) - .with_stderr( - "error: alias test-1 has unresolvable recursive definition: test-1 -> test-2 -> test-3 -> test-1", - ) - .run(); - - p.cargo("test-2") - .with_status(101) - .with_stderr( - "error: alias test-2 has unresolvable recursive definition: test-2 -> test-3 -> test-1 -> test-2", - ) - .run(); -} - -#[cargo_test] -fn alias_list_test() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [alias] - b-cargo-test = ["build", "--release"] - "#, - ) - .build(); - - p.cargo("b-cargo-test -v") - .with_stderr_contains("[COMPILING] foo v0.5.0 [..]") - .with_stderr_contains("[RUNNING] `rustc --crate-name [..]") - .run(); -} - -#[cargo_test] -fn alias_with_flags_config() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [alias] - b-cargo-test = "build --release" - "#, - ) - .build(); - - p.cargo("b-cargo-test -v") - .with_stderr_contains("[COMPILING] foo v0.5.0 [..]") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]") - .run(); -} - -#[cargo_test] -fn alias_cannot_shadow_builtin_command() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [alias] - build = "fetch" - "#, - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[WARNING] user-defined alias `build` is ignored, because it is shadowed by a built-in command -[COMPILING] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn alias_override_builtin_alias() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [alias] - b = "run" - "#, - ) - .build(); - - p.cargo("b") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-[RUNNING] `target/debug/foo[EXE]` -", - ) - .run(); -} - -#[cargo_test] -fn builtin_alias_takes_options() { - // #6381 - let p = project() - .file("src/lib.rs", "") - .file( - "examples/ex1.rs", - r#"fn main() { println!("{}", std::env::args().skip(1).next().unwrap()) }"#, - ) - .build(); - - p.cargo("r --example ex1 -- asdf").with_stdout("asdf").run(); -} - -#[cargo_test] -fn global_options_with_alias() { - // Check that global options are passed through. - let p = project().file("src/lib.rs", "").build(); - - p.cargo("-v c") - .with_stderr( - "\ -[CHECKING] foo [..] -[RUNNING] `rustc [..] -[FINISHED] dev [..] -", - ) - .run(); -} - -#[cargo_test] -fn weird_check() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("-- check --invalid_argument -some-other-argument") - .with_stderr( - "\ -[WARNING] trailing arguments after built-in command `check` are ignored: `--invalid_argument -some-other-argument` -[CHECKING] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_command.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_command.rs deleted file mode 100644 index 3fd4badcc..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_command.rs +++ /dev/null @@ -1,382 +0,0 @@ -//! Tests for custom cargo commands and other global command features. - -use std::env; -use std::fs; -use std::io::Read; -use std::path::{Path, PathBuf}; -use std::process::Stdio; -use std::str; - -use cargo_test_support::registry::Package; -use cargo_test_support::tools::echo_subcommand; -use cargo_test_support::{ - basic_bin_manifest, cargo_exe, cargo_process, paths, project, project_in_home, -}; - -fn path() -> Vec { - env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect() -} - -#[cargo_test] -fn list_commands_with_descriptions() { - let p = project().build(); - p.cargo("--list") - .with_stdout_contains( - " build Compile a local package and all of its dependencies", - ) - // Assert that `read-manifest` prints the right one-line description followed by another - // command, indented. 
- .with_stdout_contains( - " read-manifest Print a JSON representation of a Cargo.toml manifest.", - ) - .run(); -} - -#[cargo_test] -fn list_builtin_aliases_with_descriptions() { - let p = project().build(); - p.cargo("--list") - .with_stdout_contains(" b alias: build") - .with_stdout_contains(" c alias: check") - .with_stdout_contains(" r alias: run") - .with_stdout_contains(" t alias: test") - .run(); -} - -#[cargo_test] -fn list_custom_aliases_with_descriptions() { - let p = project_in_home("proj") - .file( - &paths::home().join(".cargo").join("config"), - r#" - [alias] - myaliasstr = "foo --bar" - myaliasvec = ["foo", "--bar"] - "#, - ) - .build(); - - p.cargo("--list") - .with_stdout_contains(" myaliasstr alias: foo --bar") - .with_stdout_contains(" myaliasvec alias: foo --bar") - .run(); -} - -#[cargo_test] -fn list_dedupe() { - let p = project() - .executable(Path::new("path-test-1").join("cargo-dupe"), "") - .executable(Path::new("path-test-2").join("cargo-dupe"), "") - .build(); - - let mut path = path(); - path.push(p.root().join("path-test-1")); - path.push(p.root().join("path-test-2")); - let path = env::join_paths(path.iter()).unwrap(); - - p.cargo("--list") - .env("PATH", &path) - .with_stdout_contains_n(" dupe", 1) - .run(); -} - -#[cargo_test] -fn list_command_looks_at_path() { - let proj = project() - .executable(Path::new("path-test").join("cargo-1"), "") - .build(); - - let mut path = path(); - path.push(proj.root().join("path-test")); - let path = env::join_paths(path.iter()).unwrap(); - let output = cargo_process("-v --list") - .env("PATH", &path) - .exec_with_output() - .unwrap(); - let output = str::from_utf8(&output.stdout).unwrap(); - assert!( - output.contains("\n 1 "), - "missing 1: {}", - output - ); -} - -#[cargo_test] -fn list_command_handles_known_external_commands() { - let p = project() - .executable(Path::new("path-test").join("cargo-fmt"), "") - .build(); - - let fmt_desc = " fmt Formats all bin and lib files of the current crate using rustfmt."; - - // Without path - fmt isn't there - p.cargo("--list") - .env("PATH", "") - .with_stdout_does_not_contain(fmt_desc) - .run(); - - // With path - fmt is there with known description - let mut path = path(); - path.push(p.root().join("path-test")); - let path = env::join_paths(path.iter()).unwrap(); - - p.cargo("--list") - .env("PATH", &path) - .with_stdout_contains(fmt_desc) - .run(); -} - -#[cargo_test] -fn list_command_resolves_symlinks() { - let proj = project() - .symlink(cargo_exe(), Path::new("path-test").join("cargo-2")) - .build(); - - let mut path = path(); - path.push(proj.root().join("path-test")); - let path = env::join_paths(path.iter()).unwrap(); - let output = cargo_process("-v --list") - .env("PATH", &path) - .exec_with_output() - .unwrap(); - let output = str::from_utf8(&output.stdout).unwrap(); - assert!( - output.contains("\n 2 "), - "missing 2: {}", - output - ); -} - -#[cargo_test] -fn find_closest_capital_c_to_c() { - cargo_process("C") - .with_status(101) - .with_stderr_contains( - "\ -error: no such subcommand: `C` - -Did you mean `c`? -", - ) - .run(); -} - -#[cargo_test] -fn find_closest_captial_b_to_b() { - cargo_process("B") - .with_status(101) - .with_stderr_contains( - "\ -error: no such subcommand: `B` - -Did you mean `b`? -", - ) - .run(); -} - -#[cargo_test] -fn find_closest_biuld_to_build() { - cargo_process("biuld") - .with_status(101) - .with_stderr_contains( - "\ -error: no such subcommand: `biuld` - -Did you mean `build`? 
-", - ) - .run(); - - // But, if we actually have `biuld`, it must work! - // https://github.com/rust-lang/cargo/issues/5201 - Package::new("cargo-biuld", "1.0.0") - .file( - "src/main.rs", - r#" - fn main() { - println!("Similar, but not identical to, build"); - } - "#, - ) - .publish(); - - cargo_process("install cargo-biuld").run(); - cargo_process("biuld") - .with_stdout("Similar, but not identical to, build\n") - .run(); - cargo_process("--list") - .with_stdout_contains( - " build Compile a local package and all of its dependencies\n", - ) - .with_stdout_contains(" biuld\n") - .run(); -} - -#[cargo_test] -fn find_closest_alias() { - let root = paths::root(); - let my_home = root.join("my_home"); - fs::create_dir(&my_home).unwrap(); - fs::write( - &my_home.join("config"), - r#" - [alias] - myalias = "build" - "#, - ) - .unwrap(); - - cargo_process("myalais") - .env("CARGO_HOME", &my_home) - .with_status(101) - .with_stderr_contains( - "\ -error: no such subcommand: `myalais` - -Did you mean `myalias`? -", - ) - .run(); - - // But, if no alias is defined, it must not suggest one! - cargo_process("myalais") - .with_status(101) - .with_stderr_contains( - "\ -error: no such subcommand: `myalais` -", - ) - .with_stderr_does_not_contain( - "\ -Did you mean `myalias`? -", - ) - .run(); -} - -// If a subcommand is more than an edit distance of 3 away, we don't make a suggestion. -#[cargo_test] -fn find_closest_dont_correct_nonsense() { - cargo_process("there-is-no-way-that-there-is-a-command-close-to-this") - .cwd(&paths::root()) - .with_status(101) - .with_stderr( - "[ERROR] no such subcommand: \ - `there-is-no-way-that-there-is-a-command-close-to-this` -", - ) - .run(); -} - -#[cargo_test] -fn displays_subcommand_on_error() { - cargo_process("invalid-command") - .with_status(101) - .with_stderr("[ERROR] no such subcommand: `invalid-command`\n") - .run(); -} - -#[cargo_test] -fn override_cargo_home() { - let root = paths::root(); - let my_home = root.join("my_home"); - fs::create_dir(&my_home).unwrap(); - fs::write( - &my_home.join("config"), - r#" - [cargo-new] - vcs = "none" - "#, - ) - .unwrap(); - - cargo_process("new foo").env("CARGO_HOME", &my_home).run(); - - assert!(!paths::root().join("foo/.git").is_dir()); - - cargo_process("new foo2").run(); - - assert!(paths::root().join("foo2/.git").is_dir()); -} - -#[cargo_test] -fn cargo_subcommand_env() { - let src = format!( - r#" - use std::env; - - fn main() {{ - println!("{{}}", env::var("{}").unwrap()); - }} - "#, - cargo::CARGO_ENV - ); - - let p = project() - .at("cargo-envtest") - .file("Cargo.toml", &basic_bin_manifest("cargo-envtest")) - .file("src/main.rs", &src) - .build(); - - let target_dir = p.target_debug_dir(); - - p.cargo("build").run(); - assert!(p.bin("cargo-envtest").is_file()); - - let cargo = cargo_exe().canonicalize().unwrap(); - let mut path = path(); - path.push(target_dir); - let path = env::join_paths(path.iter()).unwrap(); - - cargo_process("envtest") - .env("PATH", &path) - .with_stdout(cargo.to_str().unwrap()) - .run(); -} - -#[cargo_test] -fn cargo_subcommand_args() { - let p = echo_subcommand(); - let cargo_foo_bin = p.bin("cargo-echo"); - assert!(cargo_foo_bin.is_file()); - - let mut path = path(); - path.push(p.target_debug_dir()); - let path = env::join_paths(path.iter()).unwrap(); - - cargo_process("echo bar -v --help") - .env("PATH", &path) - .with_stdout("echo bar -v --help") - .run(); -} - -#[cargo_test] -fn explain() { - cargo_process("--explain E0001") - .with_stdout_contains( - "This error suggests 
that the expression arm corresponding to the noted pattern", - ) - .run(); -} - -#[cargo_test] -fn closed_output_ok() { - // Checks that closed output doesn't cause an error. - let mut p = cargo_process("--list").build_command(); - p.stdout(Stdio::piped()).stderr(Stdio::piped()); - let mut child = p.spawn().unwrap(); - // Close stdout - drop(child.stdout.take()); - // Read stderr - let mut s = String::new(); - child - .stderr - .as_mut() - .unwrap() - .read_to_string(&mut s) - .unwrap(); - let status = child.wait().unwrap(); - assert!(status.success()); - assert!(s.is_empty(), "{}", s); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_config.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_config.rs deleted file mode 100644 index a11e9afc5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_config.rs +++ /dev/null @@ -1,522 +0,0 @@ -//! Tests for the `cargo config` command. - -use super::config::write_config_at; -use cargo_test_support::paths; -use std::fs; -use std::path::PathBuf; - -fn cargo_process(s: &str) -> cargo_test_support::Execs { - let mut p = cargo_test_support::cargo_process(s); - // Clear out some of the environment added by the default cargo_process so - // the tests don't need to deal with it. - p.env_remove("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO") - .env_remove("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO") - .env_remove("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO") - .env_remove("CARGO_PROFILE_BENCH_SPLIT_DEBUGINFO") - .env_remove("CARGO_INCREMENTAL"); - p -} - -#[cargo_test] -fn gated() { - cargo_process("config get") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr("\ -error: the `cargo config` command is unstable, pass `-Z unstable-options` to enable it -See https://github.com/rust-lang/cargo/issues/9301 for more information about the `cargo config` command. -") - .run(); -} - -fn common_setup() -> PathBuf { - write_config_at( - paths::home().join(".cargo/config.toml"), - " - [alias] - foo = \"abc --xyz\" - [build] - jobs = 99 - rustflags = [\"--flag-global\"] - [profile.dev] - opt-level = 3 - [profile.dev.package.foo] - opt-level = 1 - [target.'cfg(target_os = \"linux\")'] - runner = \"runme\" - - # How unknown keys are handled. - [extra-table] - somekey = \"somevalue\" - ", - ); - let sub_folder = paths::root().join("foo/.cargo"); - write_config_at( - sub_folder.join("config.toml"), - " - [alias] - sub-example = [\"sub\", \"example\"] - [build] - rustflags = [\"--flag-directory\"] - ", - ); - sub_folder -} - -#[cargo_test] -fn get_toml() { - // Notes: - // - The "extra-table" is shown without a warning. I'm not sure how that - // should be handled, since displaying warnings could cause problems - // with ingesting the output. - // - Environment variables aren't loaded. :( - let sub_folder = common_setup(); - cargo_process("config get -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .env("CARGO_ALIAS_BAR", "cat dog") - .env("CARGO_BUILD_JOBS", "100") - // The weird forward slash in the linux line is due to testsuite normalization. - .with_stdout( - "\ -alias.foo = \"abc --xyz\" -alias.sub-example = [\"sub\", \"example\"] -build.jobs = 99 -build.rustflags = [\"--flag-directory\", \"--flag-global\"] -extra-table.somekey = \"somevalue\" -profile.dev.opt-level = 3 -profile.dev.package.foo.opt-level = 1 -target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\" -# The following environment variables may affect the loaded values. 
-# CARGO_ALIAS_BAR=[..]cat dog[..] -# CARGO_BUILD_JOBS=100 -# CARGO_HOME=[ROOT]/home/.cargo -", - ) - .with_stderr("") - .run(); - - // Env keys work if they are specific. - cargo_process("config get build.jobs -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .env("CARGO_BUILD_JOBS", "100") - .with_stdout("build.jobs = 100") - .with_stderr("") - .run(); - - // Array value. - cargo_process("config get build.rustflags -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_stdout("build.rustflags = [\"--flag-directory\", \"--flag-global\"]") - .with_stderr("") - .run(); - - // Sub-table - cargo_process("config get profile -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -profile.dev.opt-level = 3 -profile.dev.package.foo.opt-level = 1 -", - ) - .with_stderr("") - .run(); - - // Specific profile entry. - cargo_process("config get profile.dev.opt-level -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_stdout("profile.dev.opt-level = 3") - .with_stderr("") - .run(); - - // A key that isn't set. - cargo_process("config get build.rustc -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stdout("") - .with_stderr("error: config value `build.rustc` is not set") - .run(); - - // A key that is not part of Cargo's config schema. - cargo_process("config get not.set -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stdout("") - .with_stderr("error: config value `not.set` is not set") - .run(); -} - -#[cargo_test] -fn get_json() { - // Notes: - // - This does not show env vars at all. :( - let all_json = r#" - { - "alias": { - "foo": "abc --xyz", - "sub-example": [ - "sub", - "example" - ] - }, - "build": { - "jobs": 99, - "rustflags": [ - "--flag-directory", - "--flag-global" - ] - }, - "extra-table": { - "somekey": "somevalue" - }, - "profile": { - "dev": { - "opt-level": 3, - "package": { - "foo": { - "opt-level": 1 - } - } - } - }, - "target": { - "cfg(target_os = \"linux\")": { - "runner": "runme" - } - } - } - "#; - let sub_folder = common_setup(); - cargo_process("config get --format=json -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .env("CARGO_ALIAS_BAR", "cat dog") - .env("CARGO_BUILD_JOBS", "100") - .with_json(all_json) - .with_stderr( - "\ -note: The following environment variables may affect the loaded values. -CARGO_ALIAS_BAR=[..]cat dog[..] -CARGO_BUILD_JOBS=100 -CARGO_HOME=[ROOT]/home/.cargo -", - ) - .run(); - - // json-value is the same for the entire root table - cargo_process("config get --format=json-value -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_json(all_json) - .with_stderr( - "\ -note: The following environment variables may affect the loaded values. 
-CARGO_HOME=[ROOT]/home/.cargo -", - ) - .run(); - - cargo_process("config get --format=json build.jobs -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_json( - r#" - {"build": {"jobs": 99}} - "#, - ) - .with_stderr("") - .run(); - - cargo_process("config get --format=json-value build.jobs -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_stdout("99") - .with_stderr("") - .run(); -} - -#[cargo_test] -fn show_origin_toml() { - let sub_folder = common_setup(); - cargo_process("config get --show-origin -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -alias.foo = \"abc --xyz\" # [ROOT]/home/.cargo/config.toml -alias.sub-example = [ - \"sub\", # [ROOT]/foo/.cargo/config.toml - \"example\", # [ROOT]/foo/.cargo/config.toml -] -build.jobs = 99 # [ROOT]/home/.cargo/config.toml -build.rustflags = [ - \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml - \"--flag-global\", # [ROOT]/home/.cargo/config.toml -] -extra-table.somekey = \"somevalue\" # [ROOT]/home/.cargo/config.toml -profile.dev.opt-level = 3 # [ROOT]/home/.cargo/config.toml -profile.dev.package.foo.opt-level = 1 # [ROOT]/home/.cargo/config.toml -target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\" # [ROOT]/home/.cargo/config.toml -# The following environment variables may affect the loaded values. -# CARGO_HOME=[ROOT]/home/.cargo -", - ) - .with_stderr("") - .run(); - - cargo_process("config get --show-origin build.rustflags -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") - .with_stdout( - "\ -build.rustflags = [ - \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml - \"--flag-global\", # [ROOT]/home/.cargo/config.toml - \"env1\", # environment variable `CARGO_BUILD_RUSTFLAGS` - \"env2\", # environment variable `CARGO_BUILD_RUSTFLAGS` -] -", - ) - .with_stderr("") - .run(); -} - -#[cargo_test] -fn show_origin_toml_cli() { - let sub_folder = common_setup(); - cargo_process("config get --show-origin build.jobs -Zunstable-options --config build.jobs=123") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .env("CARGO_BUILD_JOBS", "1") - .with_stdout("build.jobs = 123 # --config cli option") - .with_stderr("") - .run(); - - cargo_process("config get --show-origin build.rustflags -Zunstable-options --config") - .arg("build.rustflags=[\"cli1\",\"cli2\"]") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") - .with_stdout( - "\ -build.rustflags = [ - \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml - \"--flag-global\", # [ROOT]/home/.cargo/config.toml - \"cli1\", # --config cli option - \"cli2\", # --config cli option - \"env1\", # environment variable `CARGO_BUILD_RUSTFLAGS` - \"env2\", # environment variable `CARGO_BUILD_RUSTFLAGS` -] -", - ) - .with_stderr("") - .run(); -} - -#[cargo_test] -fn show_origin_json() { - let sub_folder = common_setup(); - cargo_process("config get --show-origin --format=json -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr("error: the `json` format does not support --show-origin, try the `toml` format instead") - .run(); -} - -#[cargo_test] -fn unmerged_toml() { - let sub_folder = common_setup(); - cargo_process("config get --merged=no -Zunstable-options") - 
.cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .env("CARGO_ALIAS_BAR", "cat dog") - .env("CARGO_BUILD_JOBS", "100") - .with_stdout( - "\ -# Environment variables -# CARGO=[..] -# CARGO_ALIAS_BAR=[..]cat dog[..] -# CARGO_BUILD_JOBS=100 -# CARGO_HOME=[ROOT]/home/.cargo - -# [ROOT]/foo/.cargo/config.toml -alias.sub-example = [\"sub\", \"example\"] -build.rustflags = [\"--flag-directory\"] - -# [ROOT]/home/.cargo/config.toml -alias.foo = \"abc --xyz\" -build.jobs = 99 -build.rustflags = [\"--flag-global\"] -extra-table.somekey = \"somevalue\" -profile.dev.opt-level = 3 -profile.dev.package.foo.opt-level = 1 -target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\" - -", - ) - .with_stderr("") - .run(); - - cargo_process("config get --merged=no build.rustflags -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") - .with_stdout( - "\ -# Environment variables -# CARGO_BUILD_RUSTFLAGS=[..]env1 env2[..] - -# [ROOT]/foo/.cargo/config.toml -build.rustflags = [\"--flag-directory\"] - -# [ROOT]/home/.cargo/config.toml -build.rustflags = [\"--flag-global\"] - -", - ) - .with_stderr("") - .run(); - - cargo_process("config get --merged=no does.not.exist -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_stderr("") - .with_stderr("") - .run(); - - cargo_process("config get --merged=no build.rustflags.extra -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "error: expected table for configuration key `build.rustflags`, \ - but found array in [ROOT]/foo/.cargo/config.toml", - ) - .run(); -} - -#[cargo_test] -fn unmerged_toml_cli() { - let sub_folder = common_setup(); - cargo_process("config get --merged=no build.rustflags -Zunstable-options --config") - .arg("build.rustflags=[\"cli1\",\"cli2\"]") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .env("CARGO_BUILD_RUSTFLAGS", "env1 env2") - .with_stdout( - "\ -# --config cli option -build.rustflags = [\"cli1\", \"cli2\"] - -# Environment variables -# CARGO_BUILD_RUSTFLAGS=[..]env1 env2[..] 
- -# [ROOT]/foo/.cargo/config.toml -build.rustflags = [\"--flag-directory\"] - -# [ROOT]/home/.cargo/config.toml -build.rustflags = [\"--flag-global\"] - -", - ) - .with_stderr("") - .run(); -} - -#[cargo_test] -fn unmerged_json() { - let sub_folder = common_setup(); - cargo_process("config get --merged=no --format=json -Zunstable-options") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "error: the `json` format does not support --merged=no, try the `toml` format instead", - ) - .run(); -} - -#[cargo_test] -fn includes() { - let sub_folder = common_setup(); - fs::write( - sub_folder.join("config.toml"), - " - include = 'other.toml' - [build] - rustflags = [\"--flag-directory\"] - ", - ) - .unwrap(); - fs::write( - sub_folder.join("other.toml"), - " - [build] - rustflags = [\"--flag-other\"] - ", - ) - .unwrap(); - - cargo_process("config get build.rustflags -Zunstable-options -Zconfig-include") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_stdout(r#"build.rustflags = ["--flag-other", "--flag-directory", "--flag-global"]"#) - .with_stderr("") - .run(); - - cargo_process( - "config get build.rustflags --show-origin=yes -Zunstable-options -Zconfig-include", - ) - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -build.rustflags = [ - \"--flag-other\", # [ROOT]/foo/.cargo/other.toml - \"--flag-directory\", # [ROOT]/foo/.cargo/config.toml - \"--flag-global\", # [ROOT]/home/.cargo/config.toml -] -", - ) - .with_stderr("") - .run(); - - cargo_process("config get --merged=no -Zunstable-options -Zconfig-include") - .cwd(&sub_folder.parent().unwrap()) - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -# Environment variables -# CARGO=[..] -# CARGO_HOME=[ROOT]/home/.cargo - -# [ROOT]/foo/.cargo/other.toml -build.rustflags = [\"--flag-other\"] - -# [ROOT]/foo/.cargo/config.toml -build.rustflags = [\"--flag-directory\"] -include = \"other.toml\" - -# [ROOT]/home/.cargo/config.toml -alias.foo = \"abc --xyz\" -build.jobs = 99 -build.rustflags = [\"--flag-global\"] -extra-table.somekey = \"somevalue\" -profile.dev.opt-level = 3 -profile.dev.package.foo.opt-level = 1 -target.\"cfg(target_os = \\\"linux\\\")\".runner = \"runme\" - -", - ) - .with_stderr("") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_env_config.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_env_config.rs deleted file mode 100644 index 352d9b398..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_env_config.rs +++ /dev/null @@ -1,156 +0,0 @@ -//! Tests for `[env]` config. 
- -use cargo_test_support::{basic_bin_manifest, project}; - -#[cargo_test] -fn env_basic() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/main.rs", - r#" - use std::env; - fn main() { - println!( "compile-time:{}", env!("ENV_TEST_1233") ); - println!( "run-time:{}", env::var("ENV_TEST_1233").unwrap()); - } - "#, - ) - .file( - ".cargo/config", - r#" - [env] - ENV_TEST_1233 = "Hello" - "#, - ) - .build(); - - p.cargo("run") - .with_stdout_contains("compile-time:Hello") - .with_stdout_contains("run-time:Hello") - .run(); -} - -#[cargo_test] -fn env_invalid() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/main.rs", - r#" - fn main() { - } - "#, - ) - .file( - ".cargo/config", - r#" - [env] - ENV_TEST_BOOL = false - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..]could not load config key `env.ENV_TEST_BOOL`") - .run(); -} - -#[cargo_test] -fn env_force() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/main.rs", - r#" - use std::env; - fn main() { - println!( "ENV_TEST_FORCED:{}", env!("ENV_TEST_FORCED") ); - println!( "ENV_TEST_UNFORCED:{}", env!("ENV_TEST_UNFORCED") ); - println!( "ENV_TEST_UNFORCED_DEFAULT:{}", env!("ENV_TEST_UNFORCED_DEFAULT") ); - } - "#, - ) - .file( - ".cargo/config", - r#" - [env] - ENV_TEST_UNFORCED_DEFAULT = "from-config" - ENV_TEST_UNFORCED = { value = "from-config", force = false } - ENV_TEST_FORCED = { value = "from-config", force = true } - "#, - ) - .build(); - - p.cargo("run") - .env("ENV_TEST_FORCED", "from-env") - .env("ENV_TEST_UNFORCED", "from-env") - .env("ENV_TEST_UNFORCED_DEFAULT", "from-env") - .with_stdout_contains("ENV_TEST_FORCED:from-config") - .with_stdout_contains("ENV_TEST_UNFORCED:from-env") - .with_stdout_contains("ENV_TEST_UNFORCED_DEFAULT:from-env") - .run(); -} - -#[cargo_test] -fn env_relative() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo2")) - .file( - "src/main.rs", - r#" - use std::env; - use std::path::Path; - fn main() { - println!( "ENV_TEST_REGULAR:{}", env!("ENV_TEST_REGULAR") ); - println!( "ENV_TEST_REGULAR_DEFAULT:{}", env!("ENV_TEST_REGULAR_DEFAULT") ); - println!( "ENV_TEST_RELATIVE:{}", env!("ENV_TEST_RELATIVE") ); - - assert!( Path::new(env!("ENV_TEST_RELATIVE")).is_absolute() ); - assert!( !Path::new(env!("ENV_TEST_REGULAR")).is_absolute() ); - assert!( !Path::new(env!("ENV_TEST_REGULAR_DEFAULT")).is_absolute() ); - } - "#, - ) - .file( - ".cargo/config", - r#" - [env] - ENV_TEST_REGULAR = { value = "Cargo.toml", relative = false } - ENV_TEST_REGULAR_DEFAULT = "Cargo.toml" - ENV_TEST_RELATIVE = { value = "Cargo.toml", relative = true } - "#, - ) - .build(); - - p.cargo("run").run(); -} - -#[cargo_test] -fn env_no_override() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("unchanged")) - .file( - "src/main.rs", - r#" - use std::env; - fn main() { - println!( "CARGO_PKG_NAME:{}", env!("CARGO_PKG_NAME") ); - } - "#, - ) - .file( - ".cargo/config", - r#" - [env] - CARGO_PKG_NAME = { value = "from-config", force = true } - "#, - ) - .build(); - - p.cargo("run -Zconfigurable-env") - .masquerade_as_nightly_cargo() - .with_stdout_contains("CARGO_PKG_NAME:unchanged") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_features.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_features.rs deleted file mode 100644 index 3bc66e748..000000000 --- 
a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_features.rs +++ /dev/null @@ -1,686 +0,0 @@ -//! Tests for `cargo-features` definitions. - -use cargo_test_support::registry::Package; -use cargo_test_support::{is_nightly, project, registry}; - -#[cargo_test] -fn feature_required() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - im-a-teapot = true - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - the `im-a-teapot` manifest key is unstable and may not work properly in England - -Caused by: - feature `test-dummy-unstable` is required - - The package requires the Cargo feature called `test-dummy-unstable`, \ - but that feature is not stabilized in this version of Cargo (1.[..]). - Consider adding `cargo-features = [\"test-dummy-unstable\"]` to the top of Cargo.toml \ - (above the [package] table) to tell Cargo you are opting in to use this unstable feature. - See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html for more information \ - about the status of this feature. -", - ) - .run(); - - // Same, but stable. - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - the `im-a-teapot` manifest key is unstable and may not work properly in England - -Caused by: - feature `test-dummy-unstable` is required - - The package requires the Cargo feature called `test-dummy-unstable`, \ - but that feature is not stabilized in this version of Cargo (1.[..]). - Consider trying a newer version of Cargo (this may require the nightly release). - See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ - for more information about the status of this feature. -", - ) - .run(); -} - -#[cargo_test] -fn feature_required_dependency() { - // The feature has been stabilized by a future version of Cargo, and - // someone published something uses it, but this version of Cargo has not - // yet stabilized it. Don't suggest editing Cargo.toml, since published - // packages shouldn't be edited. - Package::new("bar", "1.0.0") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - im-a-teapot = true - "#, - ) - .file("src/lib.rs", "") - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] [..] -[DOWNLOADED] bar v1.0.0 [..] -error: failed to download replaced source registry `crates-io` - -Caused by: - failed to parse manifest at `[..]/bar-1.0.0/Cargo.toml` - -Caused by: - the `im-a-teapot` manifest key is unstable and may not work properly in England - -Caused by: - feature `test-dummy-unstable` is required - - The package requires the Cargo feature called `test-dummy-unstable`, \ - but that feature is not stabilized in this version of Cargo (1.[..]). - Consider trying a more recent nightly release. - See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ - for more information about the status of this feature. -", - ) - .run(); - - // Same, but stable. 
- p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to download `bar v1.0.0` - -Caused by: - unable to get packages from source - -Caused by: - failed to download replaced source registry `crates-io` - -Caused by: - failed to parse manifest at `[..]/bar-1.0.0/Cargo.toml` - -Caused by: - the `im-a-teapot` manifest key is unstable and may not work properly in England - -Caused by: - feature `test-dummy-unstable` is required - - The package requires the Cargo feature called `test-dummy-unstable`, \ - but that feature is not stabilized in this version of Cargo (1.[..]). - Consider trying a newer version of Cargo (this may require the nightly release). - See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html \ - for more information about the status of this feature. -", - ) - .run(); -} - -#[cargo_test] -fn unknown_feature() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["foo"] - - [package] - name = "a" - version = "0.0.1" - authors = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - unknown cargo feature `foo` -", - ) - .run(); -} - -#[cargo_test] -fn stable_feature_warns() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["test-dummy-stable"] - - [package] - name = "a" - version = "0.0.1" - authors = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build") - .with_stderr( - "\ -warning: the cargo feature `test-dummy-stable` has been stabilized in the 1.0 \ -release and is no longer necessary to be listed in the manifest - See https://doc.rust-lang.org/[..]cargo/ for more information about using this feature. -[COMPILING] a [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn allow_features() { - if !is_nightly() { - // -Zallow-features on rustc is nightly only - eprintln!("skipping test allow_features without nightly rustc"); - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["test-dummy-unstable"] - - [package] - name = "a" - version = "0.0.1" - authors = [] - im-a-teapot = true - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("-Zallow-features=test-dummy-unstable build") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] a [..] -[FINISHED] [..] 
-", - ) - .run(); - - p.cargo("-Zallow-features=test-dummy-unstable,print-im-a-teapot -Zprint-im-a-teapot build") - .masquerade_as_nightly_cargo() - .with_stdout("im-a-teapot = true") - .run(); - - p.cargo("-Zallow-features=test-dummy-unstable -Zprint-im-a-teapot build") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: the feature `print-im-a-teapot` is not in the list of allowed features: [test-dummy-unstable] -", - ) - .run(); - - p.cargo("-Zallow-features= build") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - the feature `test-dummy-unstable` is not in the list of allowed features: [] -", - ) - .run(); -} - -#[cargo_test] -fn allow_features_to_rustc() { - if !is_nightly() { - // -Zallow-features on rustc is nightly only - eprintln!("skipping test allow_features_to_rustc without nightly rustc"); - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - "#, - ) - .file( - "src/lib.rs", - r#" - #![feature(test_2018_feature)] - "#, - ) - .build(); - - p.cargo("-Zallow-features= build") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains("[..]E0725[..]") - .run(); - - p.cargo("-Zallow-features=test_2018_feature build") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] a [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn allow_features_in_cfg() { - if !is_nightly() { - // -Zallow-features on rustc is nightly only - eprintln!("skipping test allow_features_in_cfg without nightly rustc"); - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["test-dummy-unstable"] - - [package] - name = "a" - version = "0.0.1" - authors = [] - im-a-teapot = true - "#, - ) - .file( - ".cargo/config.toml", - r#" - [unstable] - allow-features = ["test-dummy-unstable", "print-im-a-teapot"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] a [..] -[FINISHED] [..] -", - ) - .run(); - - p.cargo("-Zprint-im-a-teapot build") - .masquerade_as_nightly_cargo() - .with_stdout("im-a-teapot = true") - .with_stderr("[FINISHED] [..]") - .run(); - - p.cargo("-Zunstable-options build") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: the feature `unstable-options` is not in the list of allowed features: [print-im-a-teapot, test-dummy-unstable] -", - ) - .run(); - - // -Zallow-features overrides .cargo/config - p.cargo("-Zallow-features=test-dummy-unstable -Zprint-im-a-teapot build") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: the feature `print-im-a-teapot` is not in the list of allowed features: [test-dummy-unstable] -", - ) - .run(); - - p.cargo("-Zallow-features= build") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - the feature `test-dummy-unstable` is not in the list of allowed features: [] -", - ) - .run(); -} - -#[cargo_test] -fn nightly_feature_requires_nightly() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["test-dummy-unstable"] - - [package] - name = "a" - version = "0.0.1" - authors = [] - im-a-teapot = true - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] a [..] -[FINISHED] [..] 
-", - ) - .run(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ - but this is the `stable` channel - See [..] - See https://doc.rust-lang.org/[..]cargo/reference/unstable.html for more \ - information about using this feature. -", - ) - .run(); -} - -#[cargo_test] -fn nightly_feature_requires_nightly_in_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "b" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a" } - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - cargo-features = ["test-dummy-unstable"] - - [package] - name = "a" - version = "0.0.1" - authors = [] - im-a-teapot = true - "#, - ) - .file("a/src/lib.rs", "") - .build(); - p.cargo("build") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] a [..] -[COMPILING] b [..] -[FINISHED] [..] -", - ) - .run(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to get `a` as a dependency of package `b v0.0.1 ([..])` - -Caused by: - failed to load source for dependency `a` - -Caused by: - Unable to update [..] - -Caused by: - failed to parse manifest at `[..]` - -Caused by: - the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ - but this is the `stable` channel - See [..] - See https://doc.rust-lang.org/[..]cargo/reference/unstable.html for more \ - information about using this feature. -", - ) - .run(); -} - -#[cargo_test] -fn cant_publish() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["test-dummy-unstable"] - - [package] - name = "a" - version = "0.0.1" - authors = [] - im-a-teapot = true - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] a [..] -[FINISHED] [..] -", - ) - .run(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - the cargo feature `test-dummy-unstable` requires a nightly version of Cargo, \ - but this is the `stable` channel - See [..] - See https://doc.rust-lang.org/[..]cargo/reference/unstable.html for more \ - information about using this feature. -", - ) - .run(); -} - -#[cargo_test] -fn z_flags_rejected() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["test-dummy-unstable"] - - [package] - name = "a" - version = "0.0.1" - authors = [] - im-a-teapot = true - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build -Zprint-im-a-teapot") - .with_status(101) - .with_stderr( - "error: the `-Z` flag is only accepted on the nightly \ - channel of Cargo, but this is the `stable` channel\n\ - See [..]", - ) - .run(); - - p.cargo("build -Zarg") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr("error: unknown `-Z` flag specified: arg") - .run(); - - p.cargo("build -Zprint-im-a-teapot") - .masquerade_as_nightly_cargo() - .with_stdout("im-a-teapot = true\n") - .with_stderr( - "\ -[COMPILING] a [..] -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn publish_allowed() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["test-dummy-unstable"] - - [package] - name = "a" - version = "0.0.1" - authors = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("publish --token sekrit") - .masquerade_as_nightly_cargo() - .run(); -} - -#[cargo_test] -fn wrong_position() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - cargo-features = ["test-dummy-unstable"] - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("check") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at [..] - -Caused by: - cargo-features = [\"test-dummy-unstable\"] was found in the wrong location: it \ - should be set at the top of Cargo.toml before any tables -", - ) - .run(); -} - -#[cargo_test] -fn z_stabilized() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("check -Z cache-messages") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -warning: flag `-Z cache-messages` has been stabilized in the 1.40 release, \ - and is no longer necessary - Message caching is now always enabled. - -[CHECKING] foo [..] -[FINISHED] [..] -", - ) - .run(); - - p.cargo("check -Z offline") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: flag `-Z offline` has been stabilized in the 1.36 release - Offline mode is now available via the --offline CLI option - -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_targets.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_targets.rs deleted file mode 100644 index fcf293019..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cargo_targets.rs +++ /dev/null @@ -1,68 +0,0 @@ -//! Tests specifically related to target handling (lib, bins, examples, tests, benches). - -use cargo_test_support::project; - -#[cargo_test] -fn warn_unmatched_target_filters() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [lib] - test = false - bench = false - "#, - ) - .file("src/lib.rs", r#"fn main() {}"#) - .build(); - - p.cargo("check --tests --bins --examples --benches") - .with_stderr( - "\ -[WARNING] Target filters `bins`, `tests`, `examples`, `benches` specified, \ -but no targets matched. This is a no-op -[FINISHED][..] -", - ) - .run(); -} - -#[cargo_test] -fn reserved_windows_target_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [[bin]] - name = "con" - path = "src/main.rs" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - if cfg!(windows) { - p.cargo("check") - .with_stderr( - "\ -[WARNING] binary target `con` is a reserved Windows filename, \ -this target will not work on Windows platforms -[CHECKING] foo[..] -[FINISHED][..] -", - ) - .run(); - } else { - p.cargo("check") - .with_stderr("[CHECKING] foo[..]\n[FINISHED][..]") - .run(); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cfg.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cfg.rs deleted file mode 100644 index 3f79db772..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cfg.rs +++ /dev/null @@ -1,475 +0,0 @@ -//! Tests for cfg() expressions. 
- -use cargo_test_support::registry::Package; -use cargo_test_support::rustc_host; -use cargo_test_support::{basic_manifest, project}; - -#[cargo_test] -fn cfg_easy() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [target.'cfg(unix)'.dependencies] - b = { path = 'b' } - [target."cfg(windows)".dependencies] - b = { path = 'b' } - "#, - ) - .file("src/lib.rs", "extern crate b;") - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/lib.rs", "") - .build(); - p.cargo("build -v").run(); -} - -#[cargo_test] -fn dont_include() { - let other_family = if cfg!(unix) { "windows" } else { "unix" }; - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [target.'cfg({})'.dependencies] - b = {{ path = 'b' }} - "#, - other_family - ), - ) - .file("src/lib.rs", "") - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/lib.rs", "") - .build(); - p.cargo("build") - .with_stderr( - "\ -[COMPILING] a v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn works_through_the_registry() { - Package::new("baz", "0.1.0").publish(); - Package::new("bar", "0.1.0") - .target_dep("baz", "0.1.0", "cfg(unix)") - .target_dep("baz", "0.1.0", "cfg(windows)") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file( - "src/lib.rs", - "#[allow(unused_extern_crates)] extern crate bar;", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] [..] index -[DOWNLOADING] crates ... -[DOWNLOADED] [..] -[DOWNLOADED] [..] -[COMPILING] baz v0.1.0 -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn ignore_version_from_other_platform() { - let this_family = if cfg!(unix) { "unix" } else { "windows" }; - let other_family = if cfg!(unix) { "windows" } else { "unix" }; - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.2.0").publish(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [target.'cfg({})'.dependencies] - bar = "0.1.0" - - [target.'cfg({})'.dependencies] - bar = "0.2.0" - "#, - this_family, other_family - ), - ) - .file( - "src/lib.rs", - "#[allow(unused_extern_crates)] extern crate bar;", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] [..] index -[DOWNLOADING] crates ... -[DOWNLOADED] [..] -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn bad_target_spec() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [target.'cfg(4)'.dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - failed to parse `4` as a cfg expression: unexpected character `4` in cfg, [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn bad_target_spec2() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [target.'cfg(bar =)'.dependencies] - baz = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - failed to parse `bar =` as a cfg expression: expected a string, but cfg expression ended -", - ) - .run(); -} - -#[cargo_test] -fn multiple_match_ok() { - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [target.'cfg(unix)'.dependencies] - b = {{ path = 'b' }} - [target.'cfg(target_family = "unix")'.dependencies] - b = {{ path = 'b' }} - [target."cfg(windows)".dependencies] - b = {{ path = 'b' }} - [target.'cfg(target_family = "windows")'.dependencies] - b = {{ path = 'b' }} - [target."cfg(any(windows, unix))".dependencies] - b = {{ path = 'b' }} - - [target.{}.dependencies] - b = {{ path = 'b' }} - "#, - rustc_host() - ), - ) - .file("src/lib.rs", "extern crate b;") - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/lib.rs", "") - .build(); - p.cargo("build -v").run(); -} - -#[cargo_test] -fn any_ok() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [target."cfg(any(windows, unix))".dependencies] - b = { path = 'b' } - "#, - ) - .file("src/lib.rs", "extern crate b;") - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/lib.rs", "") - .build(); - p.cargo("build -v").run(); -} - -// https://github.com/rust-lang/cargo/issues/5313 -#[cargo_test] -#[cfg(all(target_arch = "x86_64", target_os = "linux", target_env = "gnu"))] -fn cfg_looks_at_rustflags_for_target() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [target.'cfg(with_b)'.dependencies] - b = { path = 'b' } - "#, - ) - .file( - "src/main.rs", - r#" - #[cfg(with_b)] - extern crate b; - - fn main() { b::foo(); } - "#, - ) - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/lib.rs", "pub fn foo() {}") - .build(); - - p.cargo("build --target x86_64-unknown-linux-gnu") - .env("RUSTFLAGS", "--cfg with_b") - .run(); -} - -#[cargo_test] -fn bad_cfg_discovery() { - // Check error messages when `rustc -v` and `rustc --print=*` parsing fails. - // - // This is a `rustc` replacement which behaves differently based on an - // environment variable. 
- let p = project() - .at("compiler") - .file("Cargo.toml", &basic_manifest("compiler", "0.1.0")) - .file( - "src/main.rs", - r#" - fn run_rustc() -> String { - let mut cmd = std::process::Command::new("rustc"); - for arg in std::env::args_os().skip(1) { - cmd.arg(arg); - } - String::from_utf8(cmd.output().unwrap().stdout).unwrap() - } - - fn main() { - let mode = std::env::var("FUNKY_MODE").unwrap(); - if mode == "bad-version" { - println!("foo"); - return; - } - if std::env::args_os().any(|a| a == "-vV") { - print!("{}", run_rustc()); - return; - } - if mode == "no-crate-types" { - return; - } - if mode == "bad-crate-type" { - println!("foo"); - return; - } - let output = run_rustc(); - let mut lines = output.lines(); - let sysroot = loop { - let line = lines.next().unwrap(); - if line.contains("___") { - println!("{}", line); - } else { - break line; - } - }; - if mode == "no-sysroot" { - return; - } - println!("{}", sysroot); - if mode != "bad-cfg" { - panic!("unexpected"); - } - println!("123"); - } - "#, - ) - .build(); - p.cargo("build").run(); - let funky_rustc = p.bin("compiler"); - - let p = project().file("src/lib.rs", "").build(); - - p.cargo("build") - .env("RUSTC", &funky_rustc) - .env("FUNKY_MODE", "bad-version") - .with_status(101) - .with_stderr( - "\ -[ERROR] `rustc -vV` didn't have a line for `host:`, got: -foo - -", - ) - .run(); - - p.cargo("build") - .env("RUSTC", &funky_rustc) - .env("FUNKY_MODE", "no-crate-types") - .with_status(101) - .with_stderr( - "\ -[ERROR] malformed output when learning about crate-type bin information -command was: `[..]compiler[..] --crate-name ___ [..]` -(no output received) -", - ) - .run(); - - p.cargo("build") - .env("RUSTC", &funky_rustc) - .env("FUNKY_MODE", "no-sysroot") - .with_status(101) - .with_stderr( - "\ -[ERROR] output of --print=sysroot missing when learning about target-specific information from rustc -command was: `[..]compiler[..]--crate-type [..]` - ---- stdout -[..]___[..] -[..]___[..] -[..]___[..] -[..]___[..] -[..]___[..] -[..]___[..] - -", - ) - .run(); - - p.cargo("build") - .env("RUSTC", &funky_rustc) - .env("FUNKY_MODE", "bad-cfg") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse the cfg from `rustc --print=cfg`, got: -[..]___[..] -[..]___[..] -[..]___[..] -[..]___[..] -[..]___[..] -[..]___[..] -[..] -123 - - -Caused by: - failed to parse `123` as a cfg expression: unexpected character `1` in cfg, \ - expected parens, a comma, an identifier, or a string -", - ) - .run(); -} - -#[cargo_test] -fn exclusive_dep_kinds() { - // Checks for a bug where the same package with different cfg expressions - // was not being filtered correctly. - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [target.'cfg(abc)'.dependencies] - bar = "1.0" - - [target.'cfg(not(abc))'.build-dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "extern crate bar; fn main() {}") - .build(); - - p.cargo("check").run(); - p.change_file("src/lib.rs", "extern crate bar;"); - p.cargo("check") - .with_status(101) - // can't find crate for `bar` - .with_stderr_contains("[..]E0463[..]") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/check.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/check.rs deleted file mode 100644 index e37d572f5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/check.rs +++ /dev/null @@ -1,999 +0,0 @@ -//! 
Tests for the `cargo check` command. - -use std::fmt::{self, Write}; - -use cargo_test_support::install::exe; -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::Package; -use cargo_test_support::tools; -use cargo_test_support::{basic_manifest, project}; - -#[cargo_test] -fn check_success() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file( - "src/main.rs", - "extern crate bar; fn main() { ::bar::baz(); }", - ) - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn baz() {}") - .build(); - - foo.cargo("check").run(); -} - -#[cargo_test] -fn check_fail() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file( - "src/main.rs", - "extern crate bar; fn main() { ::bar::baz(42); }", - ) - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn baz() {}") - .build(); - - foo.cargo("check") - .with_status(101) - .with_stderr_contains("[..]this function takes 0[..]") - .run(); -} - -#[cargo_test] -fn custom_derive() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file( - "src/main.rs", - r#" - #[macro_use] - extern crate bar; - - trait B { - fn b(&self); - } - - #[derive(B)] - struct A; - - fn main() { - let a = A; - a.b(); - } - "#, - ) - .build(); - let _bar = project() - .at("bar") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - [lib] - proc-macro = true - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate proc_macro; - - use proc_macro::TokenStream; - - #[proc_macro_derive(B)] - pub fn derive(_input: TokenStream) -> TokenStream { - format!("impl B for A {{ fn b(&self) {{}} }}").parse().unwrap() - } - "#, - ) - .build(); - - foo.cargo("check").run(); -} - -#[cargo_test] -fn check_build() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file( - "src/main.rs", - "extern crate bar; fn main() { ::bar::baz(); }", - ) - .build(); - - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn baz() {}") - .build(); - - foo.cargo("check").run(); - foo.cargo("build").run(); -} - -#[cargo_test] -fn build_check() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file( - "src/main.rs", - "extern crate bar; fn main() { ::bar::baz(); }", - ) - .build(); - - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn baz() {}") - .build(); - - foo.cargo("build -v").run(); - foo.cargo("check -v").run(); -} - -// Checks that where a project has both a lib and a bin, the lib is only checked -// not built. -#[cargo_test] -fn issue_3418() { - let foo = project() - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .build(); - - foo.cargo("check -v") - .with_stderr_contains("[..] 
--emit=[..]metadata [..]")
-        .run();
-}
-
-// Some weirdness that seems to be caused by a crate being built as well as
-// checked, but in this case with a proc macro too.
-#[cargo_test]
-fn issue_3419() {
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.0.1"
-                authors = []
-
-                [dependencies]
-                rustc-serialize = "*"
-            "#,
-        )
-        .file(
-            "src/lib.rs",
-            r#"
-                extern crate rustc_serialize;
-
-                use rustc_serialize::Decodable;
-
-                pub fn take<T: Decodable>() {}
-            "#,
-        )
-        .file(
-            "src/main.rs",
-            r#"
-                extern crate rustc_serialize;
-
-                extern crate foo;
-
-                #[derive(RustcDecodable)]
-                pub struct Foo;
-
-                fn main() {
-                    foo::take::<Foo>();
-                }
-            "#,
-        )
-        .build();
-
-    Package::new("rustc-serialize", "1.0.0")
-        .file(
-            "src/lib.rs",
-            r#"
-                pub trait Decodable: Sized {
-                    fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error>;
-                }
-                pub trait Decoder {
-                    type Error;
-                    fn read_struct<T, F>(&mut self, s_name: &str, len: usize, f: F)
-                        -> Result<T, Self::Error>
-                    where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
-                }
-            "#,
-        )
-        .publish();
-
-    p.cargo("check").run();
-}
-
-// Check on a dylib should have a different metadata hash than build.
-#[cargo_test]
-fn dylib_check_preserves_build_cache() {
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.1.0"
-                authors = []
-
-                [lib]
-                crate-type = ["dylib"]
-
-                [dependencies]
-            "#,
-        )
-        .file("src/lib.rs", "")
-        .build();
-
-    p.cargo("build")
-        .with_stderr(
-            "\
-[..]Compiling foo v0.1.0 ([..])
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-",
-        )
-        .run();
-
-    p.cargo("check").run();
-
-    p.cargo("build")
-        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
-        .run();
-}
-
-// test `cargo rustc --profile check`
-#[cargo_test]
-fn rustc_check() {
-    let foo = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.0.1"
-                authors = []
-
-                [dependencies.bar]
-                path = "../bar"
-            "#,
-        )
-        .file(
-            "src/main.rs",
-            "extern crate bar; fn main() { ::bar::baz(); }",
-        )
-        .build();
-    let _bar = project()
-        .at("bar")
-        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
-        .file("src/lib.rs", "pub fn baz() {}")
-        .build();
-
-    foo.cargo("rustc --profile check -- --emit=metadata").run();
-
-    // Verify compatible usage of --profile with --release, issue #7488
-    foo.cargo("rustc --profile check --release -- --emit=metadata")
-        .run();
-    foo.cargo("rustc --profile test --release -- --emit=metadata")
-        .run();
-}
-
-#[cargo_test]
-fn rustc_check_err() {
-    let foo = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.0.1"
-                authors = []
-
-                [dependencies.bar]
-                path = "../bar"
-            "#,
-        )
-        .file(
-            "src/main.rs",
-            "extern crate bar; fn main() { ::bar::qux(); }",
-        )
-        .build();
-    let _bar = project()
-        .at("bar")
-        .file("Cargo.toml", &basic_manifest("bar", "0.1.0"))
-        .file("src/lib.rs", "pub fn baz() {}")
-        .build();
-
-    foo.cargo("rustc --profile check -- --emit=metadata")
-        .with_status(101)
-        .with_stderr_contains("[CHECKING] bar [..]")
-        .with_stderr_contains("[CHECKING] foo [..]")
-        .with_stderr_contains("[..]cannot find function `qux` in [..]
`bar`") - .run(); -} - -#[cargo_test] -fn check_all() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [workspace] - [dependencies] - b = { path = "b" } - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("examples/a.rs", "fn main() {}") - .file("tests/a.rs", "") - .file("src/lib.rs", "") - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/main.rs", "fn main() {}") - .file("b/src/lib.rs", "") - .build(); - - p.cargo("check --workspace -v") - .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]") - .with_stderr_contains("[..] --crate-name foo src/main.rs [..]") - .with_stderr_contains("[..] --crate-name b b/src/lib.rs [..]") - .with_stderr_contains("[..] --crate-name b b/src/main.rs [..]") - .run(); -} - -#[cargo_test] -fn check_all_exclude() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") - .build(); - - p.cargo("check --workspace --exclude baz") - .with_stderr_does_not_contain("[CHECKING] baz v0.1.0 [..]") - .with_stderr( - "\ -[CHECKING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn check_all_exclude_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") - .build(); - - p.cargo("check --workspace --exclude '*z'") - .with_stderr_does_not_contain("[CHECKING] baz v0.1.0 [..]") - .with_stderr( - "\ -[CHECKING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn check_virtual_all_implied() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("check -v") - .with_stderr_contains("[..] --crate-name bar bar/src/lib.rs [..]") - .with_stderr_contains("[..] --crate-name baz baz/src/lib.rs [..]") - .run(); -} - -#[cargo_test] -fn check_virtual_manifest_one_project() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") - .build(); - - p.cargo("check -p bar") - .with_stderr_does_not_contain("[CHECKING] baz v0.1.0 [..]") - .with_stderr( - "\ -[CHECKING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn check_virtual_manifest_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("check -p '*z'") - .with_stderr_does_not_contain("[CHECKING] bar v0.1.0 [..]") - .with_stderr( - "\ -[CHECKING] baz v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn exclude_warns_on_non_existing_package() { - let p = project().file("src/lib.rs", "").build(); - p.cargo("check --workspace --exclude bar") - .with_stdout("") - .with_stderr( - "\ -[WARNING] excluded package(s) `bar` not found in workspace `[CWD]` -[CHECKING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn targets_selected_default() { - let foo = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", "pub fn smth() {}") - .file("examples/example1.rs", "fn main() {}") - .file("tests/test2.rs", "#[test] fn t() {}") - .file("benches/bench3.rs", "") - .build(); - - foo.cargo("check -v") - .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]") - .with_stderr_contains("[..] --crate-name foo src/main.rs [..]") - .with_stderr_does_not_contain("[..] --crate-name example1 examples/example1.rs [..]") - .with_stderr_does_not_contain("[..] --crate-name test2 tests/test2.rs [..]") - .with_stderr_does_not_contain("[..] --crate-name bench3 benches/bench3.rs [..]") - .run(); -} - -#[cargo_test] -fn targets_selected_all() { - let foo = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", "pub fn smth() {}") - .file("examples/example1.rs", "fn main() {}") - .file("tests/test2.rs", "#[test] fn t() {}") - .file("benches/bench3.rs", "") - .build(); - - foo.cargo("check --all-targets -v") - .with_stderr_contains("[..] --crate-name foo src/lib.rs [..]") - .with_stderr_contains("[..] --crate-name foo src/main.rs [..]") - .with_stderr_contains("[..] --crate-name example1 examples/example1.rs [..]") - .with_stderr_contains("[..] --crate-name test2 tests/test2.rs [..]") - .with_stderr_contains("[..] --crate-name bench3 benches/bench3.rs [..]") - .run(); -} - -#[cargo_test] -fn check_unit_test_profile() { - let foo = project() - .file( - "src/lib.rs", - r#" - #[cfg(test)] - mod tests { - #[test] - fn it_works() { - badtext - } - } - "#, - ) - .build(); - - foo.cargo("check").run(); - foo.cargo("check --profile test") - .with_status(101) - .with_stderr_contains("[..]badtext[..]") - .run(); -} - -// Verify what is checked with various command-line filters. 
-#[cargo_test] -fn check_filters() { - let p = project() - .file( - "src/lib.rs", - r#" - fn unused_normal_lib() {} - #[cfg(test)] - mod tests { - fn unused_unit_lib() {} - } - "#, - ) - .file( - "src/main.rs", - r#" - fn main() {} - fn unused_normal_bin() {} - #[cfg(test)] - mod tests { - fn unused_unit_bin() {} - } - "#, - ) - .file( - "tests/t1.rs", - r#" - fn unused_normal_t1() {} - #[cfg(test)] - mod tests { - fn unused_unit_t1() {} - } - "#, - ) - .file( - "examples/ex1.rs", - r#" - fn main() {} - fn unused_normal_ex1() {} - #[cfg(test)] - mod tests { - fn unused_unit_ex1() {} - } - "#, - ) - .file( - "benches/b1.rs", - r#" - fn unused_normal_b1() {} - #[cfg(test)] - mod tests { - fn unused_unit_b1() {} - } - "#, - ) - .build(); - - p.cargo("check") - .with_stderr_contains("[..]unused_normal_lib[..]") - .with_stderr_contains("[..]unused_normal_bin[..]") - .with_stderr_does_not_contain("[..]unused_normal_t1[..]") - .with_stderr_does_not_contain("[..]unused_normal_ex1[..]") - .with_stderr_does_not_contain("[..]unused_normal_b1[..]") - .with_stderr_does_not_contain("[..]unused_unit_[..]") - .run(); - p.root().join("target").rm_rf(); - p.cargo("check --tests -v") - .with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --test [..]") - .with_stderr_contains("[..] --crate-name foo src/lib.rs [..] --crate-type lib [..]") - .with_stderr_contains("[..] --crate-name foo src/main.rs [..] --test [..]") - .with_stderr_contains("[..]unused_unit_lib[..]") - .with_stderr_contains("[..]unused_unit_bin[..]") - .with_stderr_contains("[..]unused_normal_lib[..]") - .with_stderr_contains("[..]unused_normal_bin[..]") - .with_stderr_contains("[..]unused_unit_t1[..]") - .with_stderr_does_not_contain("[..]unused_normal_ex1[..]") - .with_stderr_does_not_contain("[..]unused_unit_ex1[..]") - .with_stderr_does_not_contain("[..]unused_normal_b1[..]") - .with_stderr_does_not_contain("[..]unused_unit_b1[..]") - .with_stderr_does_not_contain("[..]--crate-type bin[..]") - .run(); - p.root().join("target").rm_rf(); - p.cargo("check --test t1 -v") - .with_stderr_contains("[..]unused_normal_lib[..]") - .with_stderr_contains("[..]unused_unit_t1[..]") - .with_stderr_does_not_contain("[..]unused_unit_lib[..]") - .with_stderr_does_not_contain("[..]unused_normal_bin[..]") - .with_stderr_does_not_contain("[..]unused_unit_bin[..]") - .with_stderr_does_not_contain("[..]unused_normal_ex1[..]") - .with_stderr_does_not_contain("[..]unused_normal_b1[..]") - .with_stderr_does_not_contain("[..]unused_unit_ex1[..]") - .with_stderr_does_not_contain("[..]unused_unit_b1[..]") - .run(); - p.root().join("target").rm_rf(); - p.cargo("check --all-targets -v") - .with_stderr_contains("[..]unused_normal_lib[..]") - .with_stderr_contains("[..]unused_normal_bin[..]") - .with_stderr_contains("[..]unused_normal_t1[..]") - .with_stderr_contains("[..]unused_normal_ex1[..]") - .with_stderr_contains("[..]unused_normal_b1[..]") - .with_stderr_contains("[..]unused_unit_b1[..]") - .with_stderr_contains("[..]unused_unit_t1[..]") - .with_stderr_contains("[..]unused_unit_lib[..]") - .with_stderr_contains("[..]unused_unit_bin[..]") - .with_stderr_does_not_contain("[..]unused_unit_ex1[..]") - .run(); -} - -#[cargo_test] -fn check_artifacts() { - // Verify which artifacts are created when running check (#4059). 
- let p = project() - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .file("tests/t1.rs", "") - .file("examples/ex1.rs", "fn main() {}") - .file("benches/b1.rs", "") - .build(); - - p.cargo("check").run(); - assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); - assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); - assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); - assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 2); - - p.root().join("target").rm_rf(); - p.cargo("check --lib").run(); - assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); - assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); - assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); - assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); - - p.root().join("target").rm_rf(); - p.cargo("check --bin foo").run(); - assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); - assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); - assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); - assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 2); - - p.root().join("target").rm_rf(); - p.cargo("check --test t1").run(); - assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); - assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); - assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); - assert_eq!(p.glob("target/debug/t1-*").count(), 0); - assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); - assert_eq!(p.glob("target/debug/deps/libt1-*.rmeta").count(), 1); - - p.root().join("target").rm_rf(); - p.cargo("check --example ex1").run(); - assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); - assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); - assert!(!p - .root() - .join("target/debug/examples") - .join(exe("ex1")) - .is_file()); - assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); - assert_eq!(p.glob("target/debug/examples/libex1-*.rmeta").count(), 1); - - p.root().join("target").rm_rf(); - p.cargo("check --bench b1").run(); - assert!(!p.root().join("target/debug/libfoo.rmeta").is_file()); - assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); - assert!(!p.root().join("target/debug").join(exe("foo")).is_file()); - assert_eq!(p.glob("target/debug/b1-*").count(), 0); - assert_eq!(p.glob("target/debug/deps/libfoo-*.rmeta").count(), 1); - assert_eq!(p.glob("target/debug/deps/libb1-*.rmeta").count(), 1); -} - -#[cargo_test] -fn short_message_format() { - let foo = project() - .file("src/lib.rs", "fn foo() { let _x: bool = 'a'; }") - .build(); - foo.cargo("check --message-format=short") - .with_status(101) - .with_stderr_contains( - "\ -src/lib.rs:1:27: error[E0308]: mismatched types -error: could not compile `foo` due to previous error -", - ) - .run(); -} - -#[cargo_test] -fn proc_macro() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "demo" - version = "0.0.1" - - [lib] - proc-macro = true - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate proc_macro; - - use proc_macro::TokenStream; - - #[proc_macro_derive(Foo)] - pub fn demo(_input: TokenStream) -> TokenStream { - "".parse().unwrap() - } - "#, - ) - .file( - "src/main.rs", - r#" - #[macro_use] - extern crate demo; - - #[derive(Foo)] - struct A; - - fn main() {} - "#, - ) - .build(); - p.cargo("check -v").env("CARGO_LOG", "cargo=trace").run(); -} - -#[cargo_test] -fn does_not_use_empty_rustc_wrapper() { - 
let p = project().file("src/lib.rs", "").build(); - p.cargo("check").env("RUSTC_WRAPPER", "").run(); -} - -#[cargo_test] -fn does_not_use_empty_rustc_workspace_wrapper() { - let p = project().file("src/lib.rs", "").build(); - p.cargo("check").env("RUSTC_WORKSPACE_WRAPPER", "").run(); -} - -#[cargo_test] -fn error_from_deep_recursion() -> Result<(), fmt::Error> { - let mut big_macro = String::new(); - writeln!(big_macro, "macro_rules! m {{")?; - for i in 0..130 { - writeln!(big_macro, "({}) => {{ m!({}); }};", i, i + 1)?; - } - writeln!(big_macro, "}}")?; - writeln!(big_macro, "m!(0);")?; - - let p = project().file("src/lib.rs", &big_macro).build(); - p.cargo("check --message-format=json") - .with_status(101) - .with_stdout_contains( - "[..]\"message\":\"recursion limit reached while expanding [..]`m[..]`\"[..]", - ) - .run(); - - Ok(()) -} - -#[cargo_test] -fn rustc_workspace_wrapper_affects_all_workspace_members() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("check") - .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) - .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]") - .with_stderr_contains("WRAPPER CALLED: rustc --crate-name baz [..]") - .run(); -} - -#[cargo_test] -fn rustc_workspace_wrapper_includes_path_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - - [dependencies] - baz = { path = "baz" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("check --workspace") - .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) - .with_stderr_contains("WRAPPER CALLED: rustc --crate-name foo [..]") - .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]") - .with_stderr_contains("WRAPPER CALLED: rustc --crate-name baz [..]") - .run(); -} - -#[cargo_test] -fn rustc_workspace_wrapper_respects_primary_units() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("check -p bar") - .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) - .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]") - .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name baz [..]") - .run(); -} - -#[cargo_test] -fn rustc_workspace_wrapper_excludes_published_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - - [dependencies] - baz = "1.0.0" - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - Package::new("baz", "1.0.0").publish(); - - p.cargo("check --workspace -v") - .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) - .with_stderr_contains("WRAPPER CALLED: rustc 
--crate-name foo [..]")
-        .with_stderr_contains("WRAPPER CALLED: rustc --crate-name bar [..]")
-        .with_stderr_contains("[CHECKING] baz [..]")
-        .with_stdout_does_not_contain("WRAPPER CALLED: rustc --crate-name baz [..]")
-        .run();
-}
diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/clean.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/clean.rs
deleted file mode 100644
index 1f9313451..000000000
--- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/clean.rs
+++ /dev/null
@@ -1,589 +0,0 @@
-//! Tests for the `cargo clean` command.
-
-use cargo_test_support::paths::is_symlink;
-use cargo_test_support::registry::Package;
-use cargo_test_support::{
-    basic_bin_manifest, basic_manifest, git, main_file, project, project_in, rustc_host,
-};
-use glob::GlobError;
-use std::env;
-use std::path::{Path, PathBuf};
-
-#[cargo_test]
-fn cargo_clean_simple() {
-    let p = project()
-        .file("Cargo.toml", &basic_bin_manifest("foo"))
-        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
-        .build();
-
-    p.cargo("build").run();
-    assert!(p.build_dir().is_dir());
-
-    p.cargo("clean").run();
-    assert!(!p.build_dir().is_dir());
-}
-
-#[cargo_test]
-fn different_dir() {
-    let p = project()
-        .file("Cargo.toml", &basic_bin_manifest("foo"))
-        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
-        .file("src/bar/a.rs", "")
-        .build();
-
-    p.cargo("build").run();
-    assert!(p.build_dir().is_dir());
-
-    p.cargo("clean").cwd("src").with_stdout("").run();
-    assert!(!p.build_dir().is_dir());
-}
-
-#[cargo_test]
-fn clean_multiple_packages() {
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.0.1"
-                authors = []
-
-                [dependencies.d1]
-                path = "d1"
-                [dependencies.d2]
-                path = "d2"
-
-                [[bin]]
-                name = "foo"
-            "#,
-        )
-        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
-        .file("d1/Cargo.toml", &basic_bin_manifest("d1"))
-        .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
-        .file("d2/Cargo.toml", &basic_bin_manifest("d2"))
-        .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }")
-        .build();
-
-    p.cargo("build -p d1 -p d2 -p foo").run();
-
-    let d1_path = &p
-        .build_dir()
-        .join("debug")
-        .join(format!("d1{}", env::consts::EXE_SUFFIX));
-    let d2_path = &p
-        .build_dir()
-        .join("debug")
-        .join(format!("d2{}", env::consts::EXE_SUFFIX));
-
-    assert!(p.bin("foo").is_file());
-    assert!(d1_path.is_file());
-    assert!(d2_path.is_file());
-
-    p.cargo("clean -p d1 -p d2")
-        .cwd("src")
-        .with_stdout("")
-        .run();
-    assert!(p.bin("foo").is_file());
-    assert!(!d1_path.is_file());
-    assert!(!d2_path.is_file());
-}
-
-#[cargo_test]
-fn clean_multiple_packages_in_glob_char_path() {
-    let p = project_in("[d1]")
-        .file("Cargo.toml", &basic_bin_manifest("foo"))
-        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
-        .build();
-    let foo_path = &p.build_dir().join("debug").join("deps");
-
-    // Assert that build artifacts are produced
-    p.cargo("build").run();
-    assert_ne!(get_build_artifacts(foo_path).len(), 0);
-
-    // Assert that build artifacts are destroyed
-    p.cargo("clean -p foo").run();
-    assert_eq!(get_build_artifacts(foo_path).len(), 0);
-}
-
-fn get_build_artifacts(path: &PathBuf) -> Vec<Result<PathBuf, GlobError>> {
-    let pattern = path.to_str().expect("expected utf-8 path");
-    let pattern = glob::Pattern::escape(pattern);
-
-    #[cfg(not(target_env = "msvc"))]
-    const FILE: &str = "foo-*";
-
-    #[cfg(target_env = "msvc")]
-    const FILE: &str = "foo.pdb";
-
-    let path = PathBuf::from(pattern).join(FILE);
-    let path = path.to_str().expect("expected utf-8 path");
-    glob::glob(path)
-        .expect("expected glob to run")
-        .into_iter()
-        .collect::<Vec<Result<PathBuf, GlobError>>>()
-}
-
-#[cargo_test]
-fn clean_release() {
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.0.1"
-                authors = []
-
-                [dependencies]
-                a = { path = "a" }
-            "#,
-        )
-        .file("src/main.rs", "fn main() {}")
-        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
-        .file("a/src/lib.rs", "")
-        .build();
-
-    p.cargo("build --release").run();
-
-    p.cargo("clean -p foo").run();
-    p.cargo("build --release").with_stdout("").run();
-
-    p.cargo("clean -p foo --release").run();
-    p.cargo("build --release")
-        .with_stderr(
-            "\
-[COMPILING] foo v0.0.1 ([..])
-[FINISHED] release [optimized] target(s) in [..]
-",
-        )
-        .run();
-
-    p.cargo("build").run();
-
-    p.cargo("clean").arg("--release").run();
-    assert!(p.build_dir().is_dir());
-    assert!(p.build_dir().join("debug").is_dir());
-    assert!(!p.build_dir().join("release").is_dir());
-}
-
-#[cargo_test]
-fn clean_doc() {
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.0.1"
-                authors = []
-
-                [dependencies]
-                a = { path = "a" }
-            "#,
-        )
-        .file("src/main.rs", "fn main() {}")
-        .file("a/Cargo.toml", &basic_manifest("a", "0.0.1"))
-        .file("a/src/lib.rs", "")
-        .build();
-
-    p.cargo("doc").run();
-
-    let doc_path = &p.build_dir().join("doc");
-
-    assert!(doc_path.is_dir());
-
-    p.cargo("clean --doc").run();
-
-    assert!(!doc_path.is_dir());
-    assert!(p.build_dir().is_dir());
-}
-
-#[cargo_test]
-fn build_script() {
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.0.1"
-                authors = []
-                build = "build.rs"
-            "#,
-        )
-        .file("src/main.rs", "fn main() {}")
-        .file(
-            "build.rs",
-            r#"
-                use std::path::PathBuf;
-                use std::env;
-
-                fn main() {
-                    let out = PathBuf::from(env::var_os("OUT_DIR").unwrap());
-                    if env::var("FIRST").is_ok() {
-                        std::fs::File::create(out.join("out")).unwrap();
-                    } else {
-                        assert!(!out.join("out").exists());
-                    }
-                }
-            "#,
-        )
-        .file("a/src/lib.rs", "")
-        .build();
-
-    p.cargo("build").env("FIRST", "1").run();
-    p.cargo("clean -p foo").run();
-    p.cargo("build -v")
-        .with_stderr(
-            "\
-[COMPILING] foo v0.0.1 ([..])
-[RUNNING] `rustc [..] build.rs [..]`
-[RUNNING] `[..]build-script-build`
-[RUNNING] `rustc [..] src/main.rs [..]`
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", - ) - .run(); -} - -#[cargo_test] -fn clean_git() { - let git = git::new("dep", |project| { - project - .file("Cargo.toml", &basic_manifest("dep", "0.5.0")) - .file("src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - dep = {{ git = '{}' }} - "#, - git.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); - p.cargo("clean -p dep").with_stdout("").run(); - p.cargo("build").run(); -} - -#[cargo_test] -fn registry() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.1.0").publish(); - - p.cargo("build").run(); - p.cargo("clean -p bar").with_stdout("").run(); - p.cargo("build").run(); -} - -#[cargo_test] -fn clean_verbose() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = "0.1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.1.0").publish(); - - p.cargo("build").run(); - p.cargo("clean -p bar --verbose") - .with_stderr( - "\ -[REMOVING] [..] -[REMOVING] [..] -[REMOVING] [..] -[REMOVING] [..] -", - ) - .run(); - p.cargo("build").run(); -} - -#[cargo_test] -fn clean_remove_rlib_rmeta() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - assert!(p.target_debug_dir().join("libfoo.rlib").exists()); - let rmeta = p.glob("target/debug/deps/*.rmeta").next().unwrap().unwrap(); - assert!(rmeta.exists()); - p.cargo("clean -p foo").run(); - assert!(!p.target_debug_dir().join("libfoo.rlib").exists()); - assert!(!rmeta.exists()); -} - -#[cargo_test] -fn package_cleans_all_the_things() { - // -p cleans everything - // Use dashes everywhere to make sure dash/underscore stuff is handled. - for crate_type in &["rlib", "dylib", "cdylib", "staticlib", "proc-macro"] { - // Try each crate type individually since the behavior changes when - // they are combined. 
- let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo-bar" - version = "0.1.0" - - [lib] - crate-type = ["{}"] - "#, - crate_type - ), - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build").run(); - p.cargo("clean -p foo-bar").run(); - assert_all_clean(&p.build_dir()); - } - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo-bar" - version = "0.1.0" - edition = "2018" - - [lib] - crate-type = ["rlib", "dylib", "staticlib"] - - [[example]] - name = "foo-ex-rlib" - crate-type = ["rlib"] - test = true - - [[example]] - name = "foo-ex-cdylib" - crate-type = ["cdylib"] - test = true - - [[example]] - name = "foo-ex-bin" - test = true - "#, - ) - .file("src/lib.rs", "") - .file("src/lib/some-main.rs", "fn main() {}") - .file("src/bin/other-main.rs", "fn main() {}") - .file("examples/foo-ex-rlib.rs", "") - .file("examples/foo-ex-cdylib.rs", "") - .file("examples/foo-ex-bin.rs", "fn main() {}") - .file("tests/foo-test.rs", "") - .file("benches/foo-bench.rs", "") - .file("build.rs", "fn main() {}") - .build(); - - p.cargo("build --all-targets") - .env("CARGO_INCREMENTAL", "1") - .run(); - p.cargo("test --all-targets") - .env("CARGO_INCREMENTAL", "1") - .run(); - p.cargo("check --all-targets") - .env("CARGO_INCREMENTAL", "1") - .run(); - p.cargo("clean -p foo-bar").run(); - assert_all_clean(&p.build_dir()); - - // Try some targets. - p.cargo("build --all-targets --target") - .arg(rustc_host()) - .run(); - p.cargo("clean -p foo-bar --target").arg(rustc_host()).run(); - assert_all_clean(&p.build_dir()); -} - -// Ensures that all files for the package have been deleted. -#[track_caller] -fn assert_all_clean(build_dir: &Path) { - let walker = walkdir::WalkDir::new(build_dir).into_iter(); - for entry in walker.filter_entry(|e| { - let path = e.path(); - // This is a known limitation, clean can't differentiate between - // the different build scripts from different packages. - !(path - .file_name() - .unwrap() - .to_str() - .unwrap() - .starts_with("build_script_build") - && path - .parent() - .unwrap() - .file_name() - .unwrap() - .to_str() - .unwrap() - == "incremental") - }) { - let entry = entry.unwrap(); - let path = entry.path(); - if let ".rustc_info.json" | ".cargo-lock" | "CACHEDIR.TAG" = - path.file_name().unwrap().to_str().unwrap() - { - continue; - } - if is_symlink(path) || path.is_file() { - panic!("{:?} was not cleaned", path); - } - } -} - -#[cargo_test] -fn clean_spec_multiple() { - // clean -p foo where foo matches multiple versions - Package::new("bar", "1.0.0").publish(); - Package::new("bar", "2.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar1 = {version="1.0", package="bar"} - bar2 = {version="2.0", package="bar"} - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - // Check suggestion for bad pkgid. - p.cargo("clean -p baz") - .with_status(101) - .with_stderr( - "\ -error: package ID specification `baz` did not match any packages - -Did you mean `bar`? 
-", - ) - .run(); - - p.cargo("clean -p bar:1.0.0") - .with_stderr( - "warning: version qualifier in `-p bar:1.0.0` is ignored, \ - cleaning all versions of `bar` found", - ) - .run(); - let mut walker = walkdir::WalkDir::new(p.build_dir()) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| { - let n = e.file_name().to_str().unwrap(); - n.starts_with("bar") || n.starts_with("libbar") - }); - if let Some(e) = walker.next() { - panic!("{:?} was not cleaned", e.path()); - } -} - -#[cargo_test] -fn clean_spec_reserved() { - // Clean when a target (like a test) has a reserved name. In this case, - // make sure `clean -p` doesn't delete the reserved directory `build` when - // there is a test named `build`. - Package::new("bar", "1.0.0") - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .file("tests/build.rs", "") - .build(); - - p.cargo("build --all-targets").run(); - assert!(p.target_debug_dir().join("build").is_dir()); - let build_test = p.glob("target/debug/deps/build-*").next().unwrap().unwrap(); - assert!(build_test.exists()); - // Tests are never "uplifted". - assert!(p.glob("target/debug/build-*").next().is_none()); - - p.cargo("clean -p foo").run(); - // Should not delete this. - assert!(p.target_debug_dir().join("build").is_dir()); - - // This should not rebuild bar. - p.cargo("build -v --all-targets") - .with_stderr( - "\ -[FRESH] bar v1.0.0 -[COMPILING] foo v0.1.0 [..] -[RUNNING] `rustc [..] -[RUNNING] `rustc [..] -[RUNNING] `rustc [..] -[FINISHED] [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/collisions.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/collisions.rs deleted file mode 100644 index e9fd3085f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/collisions.rs +++ /dev/null @@ -1,546 +0,0 @@ -//! Tests for when multiple artifacts have the same output filename. -//! See https://github.com/rust-lang/cargo/issues/6313 for more details. -//! Ideally these should never happen, but I don't think we'll ever be able to -//! prevent all collisions. - -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_manifest, cross_compile, project}; -use std::env; - -#[cargo_test] -fn collision_dylib() { - // Path dependencies don't include metadata hash in filename for dylibs. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "1.0.0" - - [lib] - crate-type = ["dylib"] - "#, - ) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "1.0.0" - - [lib] - crate-type = ["dylib"] - name = "a" - "#, - ) - .file("b/src/lib.rs", "") - .build(); - - // `j=1` is required because on Windows you'll get an error due to - // two processes writing to the file at the same time. - p.cargo("build -j=1") - .with_stderr_contains(&format!("\ -[WARNING] output filename collision. -The lib target `a` in package `b v1.0.0 ([..]/foo/b)` has the same output filename as the lib target `a` in package `a v1.0.0 ([..]/foo/a)`. -Colliding filename is: [..]/foo/target/debug/deps/{}a{} -The targets should have unique names. -Consider changing their names to be unique or compiling them separately. -This may become a hard error in the future; see . 
-", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX)) - .run(); -} - -#[cargo_test] -fn collision_example() { - // Examples in a workspace can easily collide. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file("a/Cargo.toml", &basic_manifest("a", "1.0.0")) - .file("a/examples/ex1.rs", "fn main() {}") - .file("b/Cargo.toml", &basic_manifest("b", "1.0.0")) - .file("b/examples/ex1.rs", "fn main() {}") - .build(); - - // `j=1` is required because on Windows you'll get an error due to - // two processes writing to the file at the same time. - p.cargo("build --examples -j=1") - .with_stderr_contains("\ -[WARNING] output filename collision. -The example target `ex1` in package `b v1.0.0 ([..]/foo/b)` has the same output filename as the example target `ex1` in package `a v1.0.0 ([..]/foo/a)`. -Colliding filename is: [..]/foo/target/debug/examples/ex1[EXE] -The targets should have unique names. -Consider changing their names to be unique or compiling them separately. -This may become a hard error in the future; see . -") - .run(); -} - -#[cargo_test] -// --out-dir and examples are currently broken on MSVC and apple. -// See https://github.com/rust-lang/cargo/issues/7493 -#[cfg_attr(any(target_env = "msvc", target_vendor = "apple"), ignore)] -fn collision_export() { - // `--out-dir` combines some things which can cause conflicts. - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("examples/foo.rs", "fn main() {}") - .file("src/main.rs", "fn main() {}") - .build(); - - // -j1 to avoid issues with two processes writing to the same file at the - // same time. - p.cargo("build -j1 --out-dir=out -Z unstable-options --bins --examples") - .masquerade_as_nightly_cargo() - .with_stderr_contains("\ -[WARNING] `--out-dir` filename collision. -The example target `foo` in package `foo v1.0.0 ([..]/foo)` has the same output filename as the bin target `foo` in package `foo v1.0.0 ([..]/foo)`. -Colliding filename is: [..]/foo/out/foo[EXE] -The exported filenames should be unique. -Consider changing their names to be unique or compiling them separately. -This may become a hard error in the future; see . -") - .run(); -} - -#[cargo_test] -fn collision_doc() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - foo2 = { path = "foo2" } - "#, - ) - .file("src/lib.rs", "") - .file( - "foo2/Cargo.toml", - r#" - [package] - name = "foo2" - version = "0.1.0" - - [lib] - name = "foo" - "#, - ) - .file("foo2/src/lib.rs", "") - .build(); - - p.cargo("doc -j=1") - .with_stderr_contains( - "\ -[WARNING] output filename collision. -The lib target `foo` in package `foo2 v0.1.0 ([..]/foo/foo2)` has the same output \ -filename as the lib target `foo` in package `foo v0.1.0 ([..]/foo)`. -Colliding filename is: [..]/foo/target/doc/foo/index.html -The targets should have unique names. -This is a known bug where multiple crates with the same name use -the same path; see . -", - ) - .run(); -} - -#[cargo_test] -fn collision_doc_multiple_versions() { - // Multiple versions of the same package. - Package::new("old-dep", "1.0.0").publish(); - Package::new("bar", "1.0.0").dep("old-dep", "1.0").publish(); - // Note that this removes "old-dep". Just checking what happens when there - // are orphans. 
- Package::new("bar", "2.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - bar2 = { package="bar", version="2.0" } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Should only document bar 2.0, should not document old-dep. - p.cargo("doc") - .with_stderr_unordered( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v2.0.0 [..] -[DOWNLOADED] bar v1.0.0 [..] -[DOWNLOADED] old-dep v1.0.0 [..] -[CHECKING] old-dep v1.0.0 -[CHECKING] bar v2.0.0 -[CHECKING] bar v1.0.0 -[DOCUMENTING] bar v2.0.0 -[FINISHED] [..] -[DOCUMENTING] foo v0.1.0 [..] -", - ) - .run(); -} - -#[cargo_test] -fn collision_doc_host_target_feature_split() { - // Same dependency built twice due to different features. - // - // foo v0.1.0 - // โ”œโ”€โ”€ common v1.0.0 - // โ”‚ โ””โ”€โ”€ common-dep v1.0.0 - // โ””โ”€โ”€ pm v0.1.0 (proc-macro) - // โ””โ”€โ”€ common v1.0.0 - // โ””โ”€โ”€ common-dep v1.0.0 - // [build-dependencies] - // โ””โ”€โ”€ common-dep v1.0.0 - // - // Here `common` and `common-dep` are built twice. `common-dep` has - // different features for host versus target. - Package::new("common-dep", "1.0.0") - .feature("bdep-feat", &[]) - .file( - "src/lib.rs", - r#" - /// Some doc - pub fn f() {} - - /// Another doc - #[cfg(feature = "bdep-feat")] - pub fn bdep_func() {} - "#, - ) - .publish(); - Package::new("common", "1.0.0") - .dep("common-dep", "1.0") - .file( - "src/lib.rs", - r#" - /// Some doc - pub fn f() {} - "#, - ) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - resolver = "2" - - [dependencies] - pm = { path = "pm" } - common = "1.0" - - [build-dependencies] - common-dep = { version = "1.0", features = ["bdep-feat"] } - "#, - ) - .file( - "src/lib.rs", - r#" - /// Some doc - pub fn f() {} - "#, - ) - .file("build.rs", "fn main() {}") - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - edition = "2018" - - [lib] - proc-macro = true - - [dependencies] - common = "1.0" - "#, - ) - .file( - "pm/src/lib.rs", - r#" - use proc_macro::TokenStream; - - /// Some doc - #[proc_macro] - pub fn pm(_input: TokenStream) -> TokenStream { - "".parse().unwrap() - } - "#, - ) - .build(); - - // No warnings, no duplicates, common and common-dep only documented once. - p.cargo("doc") - // Cannot check full output due to https://github.com/rust-lang/cargo/issues/9076 - .with_stderr_does_not_contain("[WARNING][..]") - .run(); - - assert!(p.build_dir().join("doc/common_dep/fn.f.html").exists()); - assert!(!p - .build_dir() - .join("doc/common_dep/fn.bdep_func.html") - .exists()); - assert!(p.build_dir().join("doc/common/fn.f.html").exists()); - assert!(p.build_dir().join("doc/pm/macro.pm.html").exists()); - assert!(p.build_dir().join("doc/foo/fn.f.html").exists()); -} - -#[cargo_test] -fn collision_doc_profile_split() { - // Same dependency built twice due to different profile settings. - Package::new("common", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - pm = { path = "pm" } - common = "1.0" - - [profile.dev] - opt-level = 2 - "#, - ) - .file("src/lib.rs", "") - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - - [dependencies] - common = "1.0" - - [lib] - proc-macro = true - "#, - ) - .file("pm/src/lib.rs", "") - .build(); - - // Just to verify that common is normally built twice. 
- p.cargo("build -v") - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] common v1.0.0 [..] -[COMPILING] common v1.0.0 -[RUNNING] `rustc --crate-name common [..] -[RUNNING] `rustc --crate-name common [..] -[COMPILING] pm v0.1.0 [..] -[RUNNING] `rustc --crate-name pm [..] -[COMPILING] foo v0.1.0 [..] -[RUNNING] `rustc --crate-name foo [..] -[FINISHED] [..] -", - ) - .run(); - - // Should only document common once, no warnings. - p.cargo("doc") - .with_stderr_unordered( - "\ -[CHECKING] common v1.0.0 -[DOCUMENTING] common v1.0.0 -[DOCUMENTING] pm v0.1.0 [..] -[DOCUMENTING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn collision_doc_sources() { - // Different sources with the same package. - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - bar2 = { path = "bar", package = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("doc -j=1") - .with_stderr_unordered( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v1.0.0 [..] -[WARNING] output filename collision. -The lib target `bar` in package `bar v1.0.0` has the same output filename as \ -the lib target `bar` in package `bar v1.0.0 ([..]/foo/bar)`. -Colliding filename is: [..]/foo/target/doc/bar/index.html -The targets should have unique names. -This is a known bug where multiple crates with the same name use -the same path; see . -[CHECKING] bar v1.0.0 [..] -[DOCUMENTING] bar v1.0.0 [..] -[DOCUMENTING] bar v1.0.0 -[CHECKING] bar v1.0.0 -[DOCUMENTING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn collision_doc_target() { - // collision in doc with --target, doesn't fail due to orphans - if cross_compile::disabled() { - return; - } - - Package::new("orphaned", "1.0.0").publish(); - Package::new("bar", "1.0.0") - .dep("orphaned", "1.0") - .publish(); - Package::new("bar", "2.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar2 = { version = "2.0", package="bar" } - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("doc --target") - .arg(cross_compile::alternate()) - .with_stderr_unordered( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] orphaned v1.0.0 [..] -[DOWNLOADED] bar v2.0.0 [..] -[DOWNLOADED] bar v1.0.0 [..] -[CHECKING] orphaned v1.0.0 -[DOCUMENTING] bar v2.0.0 -[CHECKING] bar v2.0.0 -[CHECKING] bar v1.0.0 -[DOCUMENTING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn collision_with_root() { - // Check for a doc collision between a root package and a dependency. - // In this case, `foo-macro` comes from both the workspace and crates.io. - // This checks that the duplicate correction code doesn't choke on this - // by removing the root unit. 
- Package::new("foo-macro", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["abc", "foo-macro"] - "#, - ) - .file( - "abc/Cargo.toml", - r#" - [package] - name = "abc" - version = "1.0.0" - - [dependencies] - foo-macro = "1.0" - "#, - ) - .file("abc/src/lib.rs", "") - .file( - "foo-macro/Cargo.toml", - r#" - [package] - name = "foo-macro" - version = "1.0.0" - - [lib] - proc-macro = true - - [dependencies] - abc = {path="../abc"} - "#, - ) - .file("foo-macro/src/lib.rs", "") - .build(); - - p.cargo("doc -j=1") - .with_stderr_unordered("\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] foo-macro v1.0.0 [..] -warning: output filename collision. -The lib target `foo-macro` in package `foo-macro v1.0.0` has the same output filename as the lib target `foo-macro` in package `foo-macro v1.0.0 [..]`. -Colliding filename is: [CWD]/target/doc/foo_macro/index.html -The targets should have unique names. -This is a known bug where multiple crates with the same name use -the same path; see . -[CHECKING] foo-macro v1.0.0 -[DOCUMENTING] foo-macro v1.0.0 -[CHECKING] abc v1.0.0 [..] -[DOCUMENTING] foo-macro v1.0.0 [..] -[DOCUMENTING] abc v1.0.0 [..] -[FINISHED] [..] -") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/concurrent.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/concurrent.rs deleted file mode 100644 index 51d02f32b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/concurrent.rs +++ /dev/null @@ -1,509 +0,0 @@ -//! Tests for running multiple `cargo` processes at the same time. - -use std::fs; -use std::net::TcpListener; -use std::process::Stdio; -use std::sync::mpsc::channel; -use std::thread; -use std::{env, str}; - -use cargo_test_support::cargo_process; -use cargo_test_support::git; -use cargo_test_support::install::{assert_has_installed_exe, cargo_home}; -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_manifest, execs, project, slow_cpu_multiplier}; - -fn pkg(name: &str, vers: &str) { - Package::new(name, vers) - .file("src/main.rs", "fn main() {{}}") - .publish(); -} - -#[cargo_test] -fn multiple_installs() { - let p = project() - .no_manifest() - .file("a/Cargo.toml", &basic_manifest("foo", "0.0.0")) - .file("a/src/main.rs", "fn main() {}") - .file("b/Cargo.toml", &basic_manifest("bar", "0.0.0")) - .file("b/src/main.rs", "fn main() {}"); - let p = p.build(); - - let mut a = p.cargo("install").cwd("a").build_command(); - let mut b = p.cargo("install").cwd("b").build_command(); - - a.stdout(Stdio::piped()).stderr(Stdio::piped()); - b.stdout(Stdio::piped()).stderr(Stdio::piped()); - - let a = a.spawn().unwrap(); - let b = b.spawn().unwrap(); - let a = thread::spawn(move || a.wait_with_output().unwrap()); - let b = b.wait_with_output().unwrap(); - let a = a.join().unwrap(); - - execs().run_output(&a); - execs().run_output(&b); - - assert_has_installed_exe(cargo_home(), "foo"); - assert_has_installed_exe(cargo_home(), "bar"); -} - -#[cargo_test] -fn concurrent_installs() { - const LOCKED_BUILD: &str = "waiting for file lock on build directory"; - - pkg("foo", "0.0.1"); - pkg("bar", "0.0.1"); - - let mut a = cargo_process("install foo").build_command(); - let mut b = cargo_process("install bar").build_command(); - - a.stdout(Stdio::piped()).stderr(Stdio::piped()); - b.stdout(Stdio::piped()).stderr(Stdio::piped()); - - let a = a.spawn().unwrap(); - let b = b.spawn().unwrap(); - let a = thread::spawn(move || 
a.wait_with_output().unwrap()); - let b = b.wait_with_output().unwrap(); - let a = a.join().unwrap(); - - assert!(!str::from_utf8(&a.stderr).unwrap().contains(LOCKED_BUILD)); - assert!(!str::from_utf8(&b.stderr).unwrap().contains(LOCKED_BUILD)); - - execs().run_output(&a); - execs().run_output(&b); - - assert_has_installed_exe(cargo_home(), "foo"); - assert_has_installed_exe(cargo_home(), "bar"); -} - -#[cargo_test] -fn one_install_should_be_bad() { - let p = project() - .no_manifest() - .file("a/Cargo.toml", &basic_manifest("foo", "0.0.0")) - .file("a/src/main.rs", "fn main() {}") - .file("b/Cargo.toml", &basic_manifest("foo", "0.0.0")) - .file("b/src/main.rs", "fn main() {}"); - let p = p.build(); - - let mut a = p.cargo("install").cwd("a").build_command(); - let mut b = p.cargo("install").cwd("b").build_command(); - - a.stdout(Stdio::piped()).stderr(Stdio::piped()); - b.stdout(Stdio::piped()).stderr(Stdio::piped()); - - let a = a.spawn().unwrap(); - let b = b.spawn().unwrap(); - let a = thread::spawn(move || a.wait_with_output().unwrap()); - let b = b.wait_with_output().unwrap(); - let a = a.join().unwrap(); - - execs().run_output(&a); - execs().run_output(&b); - - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn multiple_registry_fetches() { - let mut pkg = Package::new("bar", "1.0.2"); - for i in 0..10 { - let name = format!("foo{}", i); - Package::new(&name, "1.0.0").publish(); - pkg.dep(&name, "*"); - } - pkg.publish(); - - let p = project() - .no_manifest() - .file( - "a/Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [dependencies] - bar = "*" - "#, - ) - .file("a/src/main.rs", "fn main() {}") - .file( - "b/Cargo.toml", - r#" - [package] - name = "bar" - authors = [] - version = "0.0.0" - - [dependencies] - bar = "*" - "#, - ) - .file("b/src/main.rs", "fn main() {}"); - let p = p.build(); - - let mut a = p.cargo("build").cwd("a").build_command(); - let mut b = p.cargo("build").cwd("b").build_command(); - - a.stdout(Stdio::piped()).stderr(Stdio::piped()); - b.stdout(Stdio::piped()).stderr(Stdio::piped()); - - let a = a.spawn().unwrap(); - let b = b.spawn().unwrap(); - let a = thread::spawn(move || a.wait_with_output().unwrap()); - let b = b.wait_with_output().unwrap(); - let a = a.join().unwrap(); - - execs().run_output(&a); - execs().run_output(&b); - - let suffix = env::consts::EXE_SUFFIX; - assert!(p - .root() - .join("a/target/debug") - .join(format!("foo{}", suffix)) - .is_file()); - assert!(p - .root() - .join("b/target/debug") - .join(format!("bar{}", suffix)) - .is_file()); -} - -#[cargo_test] -fn git_same_repo_different_tags() { - let a = git::new("dep", |project| { - project - .file("Cargo.toml", &basic_manifest("dep", "0.5.0")) - .file("src/lib.rs", "pub fn tag1() {}") - }); - - let repo = git2::Repository::open(&a.root()).unwrap(); - git::tag(&repo, "tag1"); - - a.change_file("src/lib.rs", "pub fn tag2() {}"); - git::add(&repo); - git::commit(&repo); - git::tag(&repo, "tag2"); - - let p = project() - .no_manifest() - .file( - "a/Cargo.toml", - &format!( - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [dependencies] - dep = {{ git = '{}', tag = 'tag1' }} - "#, - a.url() - ), - ) - .file( - "a/src/main.rs", - "extern crate dep; fn main() { dep::tag1(); }", - ) - .file( - "b/Cargo.toml", - &format!( - r#" - [package] - name = "bar" - authors = [] - version = "0.0.0" - - [dependencies] - dep = {{ git = '{}', tag = 'tag2' }} - "#, - a.url() - ), - ) - .file( - "b/src/main.rs", - "extern crate 
dep; fn main() { dep::tag2(); }", - ); - let p = p.build(); - - let mut a = p.cargo("build -v").cwd("a").build_command(); - let mut b = p.cargo("build -v").cwd("b").build_command(); - - a.stdout(Stdio::piped()).stderr(Stdio::piped()); - b.stdout(Stdio::piped()).stderr(Stdio::piped()); - - let a = a.spawn().unwrap(); - let b = b.spawn().unwrap(); - let a = thread::spawn(move || a.wait_with_output().unwrap()); - let b = b.wait_with_output().unwrap(); - let a = a.join().unwrap(); - - execs().run_output(&a); - execs().run_output(&b); -} - -#[cargo_test] -fn git_same_branch_different_revs() { - let a = git::new("dep", |project| { - project - .file("Cargo.toml", &basic_manifest("dep", "0.5.0")) - .file("src/lib.rs", "pub fn f1() {}") - }); - - let p = project() - .no_manifest() - .file( - "a/Cargo.toml", - &format!( - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [dependencies] - dep = {{ git = '{}' }} - "#, - a.url() - ), - ) - .file( - "a/src/main.rs", - "extern crate dep; fn main() { dep::f1(); }", - ) - .file( - "b/Cargo.toml", - &format!( - r#" - [package] - name = "bar" - authors = [] - version = "0.0.0" - - [dependencies] - dep = {{ git = '{}' }} - "#, - a.url() - ), - ) - .file( - "b/src/main.rs", - "extern crate dep; fn main() { dep::f2(); }", - ); - let p = p.build(); - - // Generate a Cargo.lock pointing at the current rev, then clear out the - // target directory - p.cargo("build").cwd("a").run(); - fs::remove_dir_all(p.root().join("a/target")).unwrap(); - - // Make a new commit on the master branch - let repo = git2::Repository::open(&a.root()).unwrap(); - a.change_file("src/lib.rs", "pub fn f2() {}"); - git::add(&repo); - git::commit(&repo); - - // Now run both builds in parallel. The build of `b` should pick up the - // newest commit while the build of `a` should use the locked old commit. - let mut a = p.cargo("build").cwd("a").build_command(); - let mut b = p.cargo("build").cwd("b").build_command(); - - a.stdout(Stdio::piped()).stderr(Stdio::piped()); - b.stdout(Stdio::piped()).stderr(Stdio::piped()); - - let a = a.spawn().unwrap(); - let b = b.spawn().unwrap(); - let a = thread::spawn(move || a.wait_with_output().unwrap()); - let b = b.wait_with_output().unwrap(); - let a = a.join().unwrap(); - - execs().run_output(&a); - execs().run_output(&b); -} - -#[cargo_test] -fn same_project() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", ""); - let p = p.build(); - - let mut a = p.cargo("build").build_command(); - let mut b = p.cargo("build").build_command(); - - a.stdout(Stdio::piped()).stderr(Stdio::piped()); - b.stdout(Stdio::piped()).stderr(Stdio::piped()); - - let a = a.spawn().unwrap(); - let b = b.spawn().unwrap(); - let a = thread::spawn(move || a.wait_with_output().unwrap()); - let b = b.wait_with_output().unwrap(); - let a = a.join().unwrap(); - - execs().run_output(&a); - execs().run_output(&b); -} - -// Make sure that if Cargo dies while holding a lock that it's released and the -// next Cargo to come in will take over cleanly. 
-// older win versions don't support job objects, so skip test there -#[cargo_test] -#[cfg_attr(target_os = "windows", ignore)] -fn killing_cargo_releases_the_lock() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - build = "build.rs" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "build.rs", - r#" - use std::net::TcpStream; - - fn main() { - if std::env::var("A").is_ok() { - TcpStream::connect(&std::env::var("ADDR").unwrap()[..]) - .unwrap(); - std::thread::sleep(std::time::Duration::new(10, 0)); - } - } - "#, - ); - let p = p.build(); - - // Our build script will connect to our local TCP socket to inform us that - // it's started and that's how we know that `a` will have the lock - // when we kill it. - let l = TcpListener::bind("127.0.0.1:0").unwrap(); - let mut a = p.cargo("build").build_command(); - let mut b = p.cargo("build").build_command(); - a.stdout(Stdio::piped()).stderr(Stdio::piped()); - b.stdout(Stdio::piped()).stderr(Stdio::piped()); - a.env("ADDR", l.local_addr().unwrap().to_string()) - .env("A", "a"); - b.env("ADDR", l.local_addr().unwrap().to_string()) - .env_remove("A"); - - // Spawn `a`, wait for it to get to the build script (at which point the - // lock is held), then kill it. - let mut a = a.spawn().unwrap(); - l.accept().unwrap(); - a.kill().unwrap(); - - // Spawn `b`, then just finish the output of a/b the same way the above - // tests does. - let b = b.spawn().unwrap(); - let a = thread::spawn(move || a.wait_with_output().unwrap()); - let b = b.wait_with_output().unwrap(); - let a = a.join().unwrap(); - - // We killed `a`, so it shouldn't succeed, but `b` should have succeeded. - assert!(!a.status.success()); - execs().run_output(&b); -} - -#[cargo_test] -fn debug_release_ok() { - let p = project().file("src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build").run(); - fs::remove_dir_all(p.root().join("target")).unwrap(); - - let mut a = p.cargo("build").build_command(); - let mut b = p.cargo("build --release").build_command(); - a.stdout(Stdio::piped()).stderr(Stdio::piped()); - b.stdout(Stdio::piped()).stderr(Stdio::piped()); - let a = a.spawn().unwrap(); - let b = b.spawn().unwrap(); - let a = thread::spawn(move || a.wait_with_output().unwrap()); - let b = b.wait_with_output().unwrap(); - let a = a.join().unwrap(); - - execs() - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run_output(&a); - execs() - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 [..] -[FINISHED] release [optimized] target(s) in [..] 
-", - ) - .run_output(&b); -} - -#[cargo_test] -fn no_deadlock_with_git_dependencies() { - let dep1 = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) - .file("src/lib.rs", "") - }); - - let dep2 = git::new("dep2", |project| { - project - .file("Cargo.toml", &basic_manifest("dep2", "0.5.0")) - .file("src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - - [dependencies] - dep1 = {{ git = '{}' }} - dep2 = {{ git = '{}' }} - "#, - dep1.url(), - dep2.url() - ), - ) - .file("src/main.rs", "fn main() { }"); - let p = p.build(); - - let n_concurrent_builds = 5; - - let (tx, rx) = channel(); - for _ in 0..n_concurrent_builds { - let cmd = p - .cargo("build") - .build_command() - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn(); - let tx = tx.clone(); - thread::spawn(move || { - let result = cmd.unwrap().wait_with_output().unwrap(); - tx.send(result).unwrap() - }); - } - - for _ in 0..n_concurrent_builds { - let result = rx.recv_timeout(slow_cpu_multiplier(30)).expect("Deadlock!"); - execs().run_output(&result); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/config.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/config.rs deleted file mode 100644 index e6576eee2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/config.rs +++ /dev/null @@ -1,1500 +0,0 @@ -//! Tests for config settings. - -use cargo::core::{PackageIdSpec, Shell}; -use cargo::util::config::{self, Config, SslVersionConfig, StringList}; -use cargo::util::interning::InternedString; -use cargo::util::toml::{self, VecStringOrBool as VSOB}; -use cargo::CargoResult; -use cargo_test_support::compare; -use cargo_test_support::{panic_error, paths, project, symlink_supported, t}; -use serde::Deserialize; -use std::borrow::Borrow; -use std::collections::{BTreeMap, HashMap}; -use std::fs; -use std::io; -use std::os; -use std::path::{Path, PathBuf}; - -/// Helper for constructing a `Config` object. -pub struct ConfigBuilder { - env: HashMap, - unstable: Vec, - config_args: Vec, - cwd: Option, - enable_nightly_features: bool, -} - -impl ConfigBuilder { - pub fn new() -> ConfigBuilder { - ConfigBuilder { - env: HashMap::new(), - unstable: Vec::new(), - config_args: Vec::new(), - cwd: None, - enable_nightly_features: false, - } - } - - /// Passes a `-Z` flag. - pub fn unstable_flag(&mut self, s: impl Into) -> &mut Self { - self.unstable.push(s.into()); - self - } - - /// Sets an environment variable. - pub fn env(&mut self, key: impl Into, val: impl Into) -> &mut Self { - self.env.insert(key.into(), val.into()); - self - } - - /// Unconditionaly enable nightly features, even on stable channels. - pub fn nightly_features_allowed(&mut self, allowed: bool) -> &mut Self { - self.enable_nightly_features = allowed; - self - } - - /// Passes a `--config` flag. - pub fn config_arg(&mut self, arg: impl Into) -> &mut Self { - if !self.unstable.iter().any(|s| s == "unstable-options") { - // --config is current unstable - self.unstable_flag("unstable-options"); - } - self.config_args.push(arg.into()); - self - } - - /// Sets the current working directory where config files will be loaded. - pub fn cwd(&mut self, path: impl AsRef) -> &mut Self { - self.cwd = Some(paths::root().join(path.as_ref())); - self - } - - /// Creates the `Config`. 
- pub fn build(&self) -> Config { - self.build_err().unwrap() - } - - /// Creates the `Config`, returning a Result. - pub fn build_err(&self) -> CargoResult { - let output = Box::new(fs::File::create(paths::root().join("shell.out")).unwrap()); - let shell = Shell::from_write(output); - let cwd = self.cwd.clone().unwrap_or_else(|| paths::root()); - let homedir = paths::home(); - let mut config = Config::new(shell, cwd, homedir); - config.nightly_features_allowed = self.enable_nightly_features || !self.unstable.is_empty(); - config.set_env(self.env.clone()); - config.set_search_stop_path(paths::root()); - config.configure( - 0, - false, - None, - false, - false, - false, - &None, - &self.unstable, - &self.config_args, - )?; - Ok(config) - } -} - -fn new_config() -> Config { - ConfigBuilder::new().build() -} - -/// Read the output from Config. -pub fn read_output(config: Config) -> String { - drop(config); // Paranoid about flushing the file. - let path = paths::root().join("shell.out"); - fs::read_to_string(path).unwrap() -} - -#[cargo_test] -fn read_env_vars_for_config() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.0" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - fn main() { - assert_eq!(env::var("NUM_JOBS").unwrap(), "100"); - } - "#, - ) - .build(); - - p.cargo("build").env("CARGO_BUILD_JOBS", "100").run(); -} - -pub fn write_config(config: &str) { - write_config_at(paths::root().join(".cargo/config"), config); -} - -pub fn write_config_at(path: impl AsRef, contents: &str) { - let path = paths::root().join(path.as_ref()); - fs::create_dir_all(path.parent().unwrap()).unwrap(); - fs::write(path, contents).unwrap(); -} - -pub fn write_config_toml(config: &str) { - write_config_at(paths::root().join(".cargo/config.toml"), config); -} - -#[cfg(unix)] -fn symlink_file(target: &Path, link: &Path) -> io::Result<()> { - os::unix::fs::symlink(target, link) -} - -#[cfg(windows)] -fn symlink_file(target: &Path, link: &Path) -> io::Result<()> { - os::windows::fs::symlink_file(target, link) -} - -fn symlink_config_to_config_toml() { - let toml_path = paths::root().join(".cargo/config.toml"); - let symlink_path = paths::root().join(".cargo/config"); - t!(symlink_file(&toml_path, &symlink_path)); -} - -#[track_caller] -pub fn assert_error>(error: E, msgs: &str) { - let causes = error - .borrow() - .chain() - .enumerate() - .map(|(i, e)| { - if i == 0 { - e.to_string() - } else { - format!("Caused by:\n {}", e) - } - }) - .collect::>() - .join("\n\n"); - assert_match(msgs, &causes); -} - -#[track_caller] -pub fn assert_match(expected: &str, actual: &str) { - if let Err(e) = compare::match_exact(expected, actual, "output", "", None) { - panic_error("", e); - } -} - -#[cargo_test] -fn get_config() { - write_config( - "\ -[S] -f1 = 123 -", - ); - - let config = new_config(); - - #[derive(Debug, Deserialize, Eq, PartialEq)] - struct S { - f1: Option, - } - let s: S = config.get("S").unwrap(); - assert_eq!(s, S { f1: Some(123) }); - let config = ConfigBuilder::new().env("CARGO_S_F1", "456").build(); - let s: S = config.get("S").unwrap(); - assert_eq!(s, S { f1: Some(456) }); -} - -#[cargo_test] -fn config_works_with_extension() { - write_config_toml( - "\ -[foo] -f1 = 1 -", - ); - - let config = new_config(); - - assert_eq!(config.get::>("foo.f1").unwrap(), Some(1)); -} - -#[cargo_test] -fn config_ambiguous_filename_symlink_doesnt_warn() { - // Windows requires special permissions to 
create symlinks. - // If we don't have permission, just skip this test. - if !symlink_supported() { - return; - }; - - write_config_toml( - "\ -[foo] -f1 = 1 -", - ); - - symlink_config_to_config_toml(); - - let config = new_config(); - - assert_eq!(config.get::>("foo.f1").unwrap(), Some(1)); - - // It should NOT have warned for the symlink. - let output = read_output(config); - assert_eq!(output, ""); -} - -#[cargo_test] -fn config_ambiguous_filename() { - write_config( - "\ -[foo] -f1 = 1 -", - ); - - write_config_toml( - "\ -[foo] -f1 = 2 -", - ); - - let config = new_config(); - - // It should use the value from the one without the extension for - // backwards compatibility. - assert_eq!(config.get::>("foo.f1").unwrap(), Some(1)); - - // But it also should have warned. - let output = read_output(config); - let expected = "\ -warning: Both `[..]/.cargo/config` and `[..]/.cargo/config.toml` exist. Using `[..]/.cargo/config` -"; - assert_match(expected, &output); -} - -#[cargo_test] -fn config_unused_fields() { - write_config( - "\ -[S] -unused = 456 -", - ); - - let config = ConfigBuilder::new() - .env("CARGO_S_UNUSED2", "1") - .env("CARGO_S2_UNUSED", "2") - .build(); - - #[derive(Debug, Deserialize, Eq, PartialEq)] - struct S { - f1: Option, - } - // This prints a warning (verified below). - let s: S = config.get("S").unwrap(); - assert_eq!(s, S { f1: None }); - // This does not print anything, we cannot easily/reliably warn for - // environment variables. - let s: S = config.get("S2").unwrap(); - assert_eq!(s, S { f1: None }); - - // Verify the warnings. - let output = read_output(config); - let expected = "\ -warning: unused config key `S.unused` in `[..]/.cargo/config` -"; - assert_match(expected, &output); -} - -#[cargo_test] -fn config_load_toml_profile() { - write_config( - "\ -[profile.dev] -opt-level = 's' -lto = true -codegen-units=4 -debug = true -debug-assertions = true -rpath = true -panic = 'abort' -overflow-checks = true -incremental = true - -[profile.dev.build-override] -opt-level = 1 - -[profile.dev.package.bar] -codegen-units = 9 - -[profile.no-lto] -inherits = 'dev' -dir-name = 'without-lto' -lto = false -", - ); - - let config = ConfigBuilder::new() - .unstable_flag("advanced-env") - .env("CARGO_PROFILE_DEV_CODEGEN_UNITS", "5") - .env("CARGO_PROFILE_DEV_BUILD_OVERRIDE_CODEGEN_UNITS", "11") - .env("CARGO_PROFILE_DEV_PACKAGE_env_CODEGEN_UNITS", "13") - .env("CARGO_PROFILE_DEV_PACKAGE_bar_OPT_LEVEL", "2") - .build(); - - // TODO: don't use actual `tomlprofile`. 
- let p: toml::TomlProfile = config.get("profile.dev").unwrap(); - let mut packages = BTreeMap::new(); - let key = toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("bar").unwrap()); - let o_profile = toml::TomlProfile { - opt_level: Some(toml::TomlOptLevel("2".to_string())), - codegen_units: Some(9), - ..Default::default() - }; - packages.insert(key, o_profile); - let key = toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("env").unwrap()); - let o_profile = toml::TomlProfile { - codegen_units: Some(13), - ..Default::default() - }; - packages.insert(key, o_profile); - - assert_eq!( - p, - toml::TomlProfile { - opt_level: Some(toml::TomlOptLevel("s".to_string())), - lto: Some(toml::StringOrBool::Bool(true)), - codegen_units: Some(5), - debug: Some(toml::U32OrBool::Bool(true)), - debug_assertions: Some(true), - rpath: Some(true), - panic: Some("abort".to_string()), - overflow_checks: Some(true), - incremental: Some(true), - package: Some(packages), - build_override: Some(Box::new(toml::TomlProfile { - opt_level: Some(toml::TomlOptLevel("1".to_string())), - codegen_units: Some(11), - ..Default::default() - })), - ..Default::default() - } - ); - - let p: toml::TomlProfile = config.get("profile.no-lto").unwrap(); - assert_eq!( - p, - toml::TomlProfile { - lto: Some(toml::StringOrBool::Bool(false)), - dir_name: Some(InternedString::new("without-lto")), - inherits: Some(InternedString::new("dev")), - ..Default::default() - } - ); -} - -#[cargo_test] -fn profile_env_var_prefix() { - // Check for a bug with collision on DEBUG vs DEBUG_ASSERTIONS. - let config = ConfigBuilder::new() - .env("CARGO_PROFILE_DEV_DEBUG_ASSERTIONS", "false") - .build(); - let p: toml::TomlProfile = config.get("profile.dev").unwrap(); - assert_eq!(p.debug_assertions, Some(false)); - assert_eq!(p.debug, None); - - let config = ConfigBuilder::new() - .env("CARGO_PROFILE_DEV_DEBUG", "1") - .build(); - let p: toml::TomlProfile = config.get("profile.dev").unwrap(); - assert_eq!(p.debug_assertions, None); - assert_eq!(p.debug, Some(toml::U32OrBool::U32(1))); - - let config = ConfigBuilder::new() - .env("CARGO_PROFILE_DEV_DEBUG_ASSERTIONS", "false") - .env("CARGO_PROFILE_DEV_DEBUG", "1") - .build(); - let p: toml::TomlProfile = config.get("profile.dev").unwrap(); - assert_eq!(p.debug_assertions, Some(false)); - assert_eq!(p.debug, Some(toml::U32OrBool::U32(1))); -} - -#[cargo_test] -fn config_deserialize_any() { - // Some tests to exercise deserialize_any for deserializers that need to - // be told the format. - write_config( - "\ -a = true -b = ['b'] -c = ['c'] -", - ); - - // advanced-env - let config = ConfigBuilder::new() - .unstable_flag("advanced-env") - .env("CARGO_ENVB", "false") - .env("CARGO_C", "['d']") - .env("CARGO_ENVL", "['a', 'b']") - .build(); - assert_eq!(config.get::("a").unwrap(), VSOB::Bool(true)); - assert_eq!( - config.get::("b").unwrap(), - VSOB::VecString(vec!["b".to_string()]) - ); - assert_eq!( - config.get::("c").unwrap(), - VSOB::VecString(vec!["c".to_string(), "d".to_string()]) - ); - assert_eq!(config.get::("envb").unwrap(), VSOB::Bool(false)); - assert_eq!( - config.get::("envl").unwrap(), - VSOB::VecString(vec!["a".to_string(), "b".to_string()]) - ); - - // Demonstrate where merging logic isn't very smart. This could be improved. 
- let config = ConfigBuilder::new().env("CARGO_A", "x y").build(); - assert_error( - config.get::("a").unwrap_err(), - "\ -error in environment variable `CARGO_A`: could not load config key `a` - -Caused by: - invalid type: string \"x y\", expected a boolean or vector of strings", - ); - - // Normal env. - let config = ConfigBuilder::new() - .unstable_flag("advanced-env") - .env("CARGO_B", "d e") - .env("CARGO_C", "f g") - .build(); - assert_eq!( - config.get::("b").unwrap(), - VSOB::VecString(vec!["b".to_string(), "d".to_string(), "e".to_string()]) - ); - assert_eq!( - config.get::("c").unwrap(), - VSOB::VecString(vec!["c".to_string(), "f".to_string(), "g".to_string()]) - ); - - // config-cli - // This test demonstrates that ConfigValue::merge isn't very smart. - // It would be nice if it was smarter. - let config = ConfigBuilder::new().config_arg("a = ['a']").build_err(); - assert_error( - config.unwrap_err(), - "\ -failed to merge --config key `a` into `[..]/.cargo/config` - -Caused by: - failed to merge config value from `--config cli option` into `[..]/.cargo/config`: \ -expected boolean, but found array", - ); - - // config-cli and advanced-env - let config = ConfigBuilder::new() - .unstable_flag("advanced-env") - .config_arg("b=['clib']") - .config_arg("c=['clic']") - .env("CARGO_B", "env1 env2") - .env("CARGO_C", "['e1', 'e2']") - .build(); - assert_eq!( - config.get::("b").unwrap(), - VSOB::VecString(vec![ - "b".to_string(), - "clib".to_string(), - "env1".to_string(), - "env2".to_string() - ]) - ); - assert_eq!( - config.get::("c").unwrap(), - VSOB::VecString(vec![ - "c".to_string(), - "clic".to_string(), - "e1".to_string(), - "e2".to_string() - ]) - ); -} - -#[cargo_test] -fn config_toml_errors() { - write_config( - "\ -[profile.dev] -opt-level = 'foo' -", - ); - - let config = new_config(); - - assert_error( - config.get::("profile.dev").unwrap_err(), - "\ -error in [..]/.cargo/config: could not load config key `profile.dev.opt-level` - -Caused by: - must be `0`, `1`, `2`, `3`, `s` or `z`, but found the string: \"foo\"", - ); - - let config = ConfigBuilder::new() - .env("CARGO_PROFILE_DEV_OPT_LEVEL", "asdf") - .build(); - - assert_error( - config.get::("profile.dev").unwrap_err(), - "\ -error in environment variable `CARGO_PROFILE_DEV_OPT_LEVEL`: could not load config key `profile.dev.opt-level` - -Caused by: - must be `0`, `1`, `2`, `3`, `s` or `z`, but found the string: \"asdf\"", - ); -} - -#[cargo_test] -fn load_nested() { - write_config( - "\ -[nest.foo] -f1 = 1 -f2 = 2 -[nest.bar] -asdf = 3 -", - ); - - let config = ConfigBuilder::new() - .unstable_flag("advanced-env") - .env("CARGO_NEST_foo_f2", "3") - .env("CARGO_NESTE_foo_f1", "1") - .env("CARGO_NESTE_foo_f2", "3") - .env("CARGO_NESTE_bar_asdf", "3") - .build(); - - type Nested = HashMap>; - - let n: Nested = config.get("nest").unwrap(); - let mut expected = HashMap::new(); - let mut foo = HashMap::new(); - foo.insert("f1".to_string(), 1); - foo.insert("f2".to_string(), 3); - expected.insert("foo".to_string(), foo); - let mut bar = HashMap::new(); - bar.insert("asdf".to_string(), 3); - expected.insert("bar".to_string(), bar); - assert_eq!(n, expected); - - let n: Nested = config.get("neste").unwrap(); - assert_eq!(n, expected); -} - -#[cargo_test] -fn get_errors() { - write_config( - "\ -[S] -f1 = 123 -f2 = 'asdf' -big = 123456789 -", - ); - - let config = ConfigBuilder::new() - .env("CARGO_E_S", "asdf") - .env("CARGO_E_BIG", "123456789") - .build(); - assert_error( - config.get::("foo").unwrap_err(), - "missing config 
key `foo`", - ); - assert_error( - config.get::("foo.bar").unwrap_err(), - "missing config key `foo.bar`", - ); - assert_error( - config.get::("S.f2").unwrap_err(), - "error in [..]/.cargo/config: `S.f2` expected an integer, but found a string", - ); - assert_error( - config.get::("S.big").unwrap_err(), - "\ -error in [..].cargo/config: could not load config key `S.big` - -Caused by: - invalid value: integer `123456789`, expected u8", - ); - - // Environment variable type errors. - assert_error( - config.get::("e.s").unwrap_err(), - "error in environment variable `CARGO_E_S`: invalid digit found in string", - ); - assert_error( - config.get::("e.big").unwrap_err(), - "\ -error in environment variable `CARGO_E_BIG`: could not load config key `e.big` - -Caused by: - invalid value: integer `123456789`, expected i8", - ); - - #[derive(Debug, Deserialize)] - #[allow(dead_code)] - struct S { - f1: i64, - f2: String, - f3: i64, - big: i64, - } - assert_error(config.get::("S").unwrap_err(), "missing field `f3`"); -} - -#[cargo_test] -fn config_get_option() { - write_config( - "\ -[foo] -f1 = 1 -", - ); - - let config = ConfigBuilder::new().env("CARGO_BAR_ASDF", "3").build(); - - assert_eq!(config.get::>("a").unwrap(), None); - assert_eq!(config.get::>("a.b").unwrap(), None); - assert_eq!(config.get::>("foo.f1").unwrap(), Some(1)); - assert_eq!(config.get::>("bar.asdf").unwrap(), Some(3)); - assert_eq!(config.get::>("bar.zzzz").unwrap(), None); -} - -#[cargo_test] -fn config_bad_toml() { - write_config("asdf"); - let config = new_config(); - assert_error( - config.get::("foo").unwrap_err(), - "\ -could not load Cargo configuration - -Caused by: - could not parse TOML configuration in `[..]/.cargo/config` - -Caused by: - could not parse input as TOML - -Caused by: - expected an equals, found eof at line 1 column 5", - ); -} - -#[cargo_test] -fn config_get_list() { - write_config( - "\ -l1 = [] -l2 = ['one', 'two'] -l3 = 123 -l4 = ['one', 'two'] - -[nested] -l = ['x'] - -[nested2] -l = ['y'] - -[nested-empty] -", - ); - - type L = Vec; - - let config = ConfigBuilder::new() - .unstable_flag("advanced-env") - .env("CARGO_L4", "['three', 'four']") - .env("CARGO_L5", "['a']") - .env("CARGO_ENV_EMPTY", "[]") - .env("CARGO_ENV_BLANK", "") - .env("CARGO_ENV_NUM", "1") - .env("CARGO_ENV_NUM_LIST", "[1]") - .env("CARGO_ENV_TEXT", "asdf") - .env("CARGO_LEPAIR", "['a', 'b']") - .env("CARGO_NESTED2_L", "['z']") - .env("CARGO_NESTEDE_L", "['env']") - .env("CARGO_BAD_ENV", "[zzz]") - .build(); - - assert_eq!(config.get::("unset").unwrap(), vec![] as Vec); - assert_eq!(config.get::("l1").unwrap(), vec![] as Vec); - assert_eq!(config.get::("l2").unwrap(), vec!["one", "two"]); - assert_error( - config.get::("l3").unwrap_err(), - "\ -invalid configuration for key `l3` -expected a list, but found a integer for `l3` in [..]/.cargo/config", - ); - assert_eq!( - config.get::("l4").unwrap(), - vec!["one", "two", "three", "four"] - ); - assert_eq!(config.get::("l5").unwrap(), vec!["a"]); - assert_eq!(config.get::("env-empty").unwrap(), vec![] as Vec); - assert_eq!(config.get::("env-blank").unwrap(), vec![] as Vec); - assert_eq!(config.get::("env-num").unwrap(), vec!["1".to_string()]); - assert_error( - config.get::("env-num-list").unwrap_err(), - "error in environment variable `CARGO_ENV_NUM_LIST`: \ - expected string, found integer", - ); - assert_eq!( - config.get::("env-text").unwrap(), - vec!["asdf".to_string()] - ); - // "invalid number" here isn't the best error, but I think it's just toml.rs. 
- assert_error( - config.get::("bad-env").unwrap_err(), - "error in environment variable `CARGO_BAD_ENV`: \ - could not parse TOML list: invalid TOML value, did you mean to use a quoted string? at line 1 column 8", - ); - - // Try some other sequence-like types. - assert_eq!( - config - .get::<(String, String, String, String)>("l4") - .unwrap(), - ( - "one".to_string(), - "two".to_string(), - "three".to_string(), - "four".to_string() - ) - ); - assert_eq!(config.get::<(String,)>("l5").unwrap(), ("a".to_string(),)); - - // Tuple struct - #[derive(Debug, Deserialize, Eq, PartialEq)] - struct TupS(String, String); - assert_eq!( - config.get::("lepair").unwrap(), - TupS("a".to_string(), "b".to_string()) - ); - - // Nested with an option. - #[derive(Debug, Deserialize, Eq, PartialEq)] - struct S { - l: Option>, - } - assert_eq!(config.get::("nested-empty").unwrap(), S { l: None }); - assert_eq!( - config.get::("nested").unwrap(), - S { - l: Some(vec!["x".to_string()]), - } - ); - assert_eq!( - config.get::("nested2").unwrap(), - S { - l: Some(vec!["y".to_string(), "z".to_string()]), - } - ); - assert_eq!( - config.get::("nestede").unwrap(), - S { - l: Some(vec!["env".to_string()]), - } - ); -} - -#[cargo_test] -fn config_get_other_types() { - write_config( - "\ -ns = 123 -ns2 = 456 -", - ); - - let config = ConfigBuilder::new() - .env("CARGO_NSE", "987") - .env("CARGO_NS2", "654") - .build(); - - #[derive(Debug, Deserialize, Eq, PartialEq)] - #[serde(transparent)] - struct NewS(i32); - assert_eq!(config.get::("ns").unwrap(), NewS(123)); - assert_eq!(config.get::("ns2").unwrap(), NewS(654)); - assert_eq!(config.get::("nse").unwrap(), NewS(987)); - assert_error( - config.get::("unset").unwrap_err(), - "missing config key `unset`", - ); -} - -#[cargo_test] -fn config_relative_path() { - write_config(&format!( - "\ -p1 = 'foo/bar' -p2 = '../abc' -p3 = 'b/c' -abs = '{}' -", - paths::home().display(), - )); - - let config = ConfigBuilder::new() - .env("CARGO_EPATH", "a/b") - .env("CARGO_P3", "d/e") - .build(); - - assert_eq!( - config - .get::("p1") - .unwrap() - .resolve_path(&config), - paths::root().join("foo/bar") - ); - assert_eq!( - config - .get::("p2") - .unwrap() - .resolve_path(&config), - paths::root().join("../abc") - ); - assert_eq!( - config - .get::("p3") - .unwrap() - .resolve_path(&config), - paths::root().join("d/e") - ); - assert_eq!( - config - .get::("abs") - .unwrap() - .resolve_path(&config), - paths::home() - ); - assert_eq!( - config - .get::("epath") - .unwrap() - .resolve_path(&config), - paths::root().join("a/b") - ); -} - -#[cargo_test] -fn config_get_integers() { - write_config( - "\ -npos = 123456789 -nneg = -123456789 -i64max = 9223372036854775807 -", - ); - - let config = ConfigBuilder::new() - .env("CARGO_EPOS", "123456789") - .env("CARGO_ENEG", "-1") - .env("CARGO_EI64MAX", "9223372036854775807") - .build(); - - assert_eq!( - config.get::("i64max").unwrap(), - 9_223_372_036_854_775_807 - ); - assert_eq!( - config.get::("i64max").unwrap(), - 9_223_372_036_854_775_807 - ); - assert_eq!( - config.get::("ei64max").unwrap(), - 9_223_372_036_854_775_807 - ); - assert_eq!( - config.get::("ei64max").unwrap(), - 9_223_372_036_854_775_807 - ); - - assert_error( - config.get::("nneg").unwrap_err(), - "\ -error in [..].cargo/config: could not load config key `nneg` - -Caused by: - invalid value: integer `-123456789`, expected u32", - ); - assert_error( - config.get::("eneg").unwrap_err(), - "\ -error in environment variable `CARGO_ENEG`: could not load config key `eneg` - 
-Caused by: - invalid value: integer `-1`, expected u32", - ); - assert_error( - config.get::("npos").unwrap_err(), - "\ -error in [..].cargo/config: could not load config key `npos` - -Caused by: - invalid value: integer `123456789`, expected i8", - ); - assert_error( - config.get::("epos").unwrap_err(), - "\ -error in environment variable `CARGO_EPOS`: could not load config key `epos` - -Caused by: - invalid value: integer `123456789`, expected i8", - ); -} - -#[cargo_test] -fn config_get_ssl_version_missing() { - write_config( - "\ -[http] -hello = 'world' -", - ); - - let config = new_config(); - - assert!(config - .get::>("http.ssl-version") - .unwrap() - .is_none()); -} - -#[cargo_test] -fn config_get_ssl_version_single() { - write_config( - "\ -[http] -ssl-version = 'tlsv1.2' -", - ); - - let config = new_config(); - - let a = config - .get::>("http.ssl-version") - .unwrap() - .unwrap(); - match a { - SslVersionConfig::Single(v) => assert_eq!(&v, "tlsv1.2"), - SslVersionConfig::Range(_) => panic!("Did not expect ssl version min/max."), - }; -} - -#[cargo_test] -fn config_get_ssl_version_min_max() { - write_config( - "\ -[http] -ssl-version.min = 'tlsv1.2' -ssl-version.max = 'tlsv1.3' -", - ); - - let config = new_config(); - - let a = config - .get::>("http.ssl-version") - .unwrap() - .unwrap(); - match a { - SslVersionConfig::Single(_) => panic!("Did not expect exact ssl version."), - SslVersionConfig::Range(range) => { - assert_eq!(range.min, Some(String::from("tlsv1.2"))); - assert_eq!(range.max, Some(String::from("tlsv1.3"))); - } - }; -} - -#[cargo_test] -fn config_get_ssl_version_both_forms_configured() { - // this is not allowed - write_config( - "\ -[http] -ssl-version = 'tlsv1.1' -ssl-version.min = 'tlsv1.2' -ssl-version.max = 'tlsv1.3' -", - ); - - let config = new_config(); - - assert_error( - config - .get::("http.ssl-version") - .unwrap_err(), - "\ -could not load Cargo configuration - -Caused by: - could not parse TOML configuration in `[..]/.cargo/config` - -Caused by: - could not parse input as TOML - -Caused by: - dotted key attempted to extend non-table type at line 2 column 15", - ); - assert!(config - .get::>("http.ssl-version") - .unwrap() - .is_none()); -} - -#[cargo_test] -/// Assert that unstable options can be configured with the `unstable` table in -/// cargo config files -fn unstable_table_notation() { - write_config( - "\ -[unstable] -print-im-a-teapot = true -", - ); - let config = ConfigBuilder::new().nightly_features_allowed(true).build(); - assert_eq!(config.cli_unstable().print_im_a_teapot, true); -} - -#[cargo_test] -/// Assert that dotted notation works for configuring unstable options -fn unstable_dotted_notation() { - write_config( - "\ -unstable.print-im-a-teapot = true -", - ); - let config = ConfigBuilder::new().nightly_features_allowed(true).build(); - assert_eq!(config.cli_unstable().print_im_a_teapot, true); -} - -#[cargo_test] -/// Assert that Zflags on the CLI take precedence over those from config -fn unstable_cli_precedence() { - write_config( - "\ -unstable.print-im-a-teapot = true -", - ); - let config = ConfigBuilder::new().nightly_features_allowed(true).build(); - assert_eq!(config.cli_unstable().print_im_a_teapot, true); - - let config = ConfigBuilder::new() - .unstable_flag("print-im-a-teapot=no") - .build(); - assert_eq!(config.cli_unstable().print_im_a_teapot, false); -} - -#[cargo_test] -/// Assert that atempting to set an unstable flag that doesn't exist via config -/// is ignored on stable -fn 
unstable_invalid_flag_ignored_on_stable() { - write_config( - "\ -unstable.an-invalid-flag = 'yes' -", - ); - assert!(ConfigBuilder::new().build_err().is_ok()); -} - -#[cargo_test] -/// Assert that unstable options can be configured with the `unstable` table in -/// cargo config files -fn unstable_flags_ignored_on_stable() { - write_config( - "\ -[unstable] -print-im-a-teapot = true -", - ); - // Enforce stable channel even when testing on nightly. - let config = ConfigBuilder::new().nightly_features_allowed(false).build(); - assert_eq!(config.cli_unstable().print_im_a_teapot, false); -} - -#[cargo_test] -fn table_merge_failure() { - // Config::merge fails to merge entries in two tables. - write_config_at( - "foo/.cargo/config", - " - [table] - key = ['foo'] - ", - ); - write_config_at( - ".cargo/config", - " - [table] - key = 'bar' - ", - ); - - #[derive(Debug, Deserialize)] - #[allow(dead_code)] - struct Table { - key: StringList, - } - let config = ConfigBuilder::new().cwd("foo").build(); - assert_error( - config.get::("table").unwrap_err(), - "\ -could not load Cargo configuration - -Caused by: - failed to merge configuration at `[..]/.cargo/config` - -Caused by: - failed to merge key `table` between [..]/foo/.cargo/config and [..]/.cargo/config - -Caused by: - failed to merge key `key` between [..]/foo/.cargo/config and [..]/.cargo/config - -Caused by: - failed to merge config value from `[..]/.cargo/config` into `[..]/foo/.cargo/config`: \ - expected array, but found string", - ); -} - -#[cargo_test] -fn non_string_in_array() { - // Currently only strings are supported. - write_config("foo = [1, 2, 3]"); - let config = new_config(); - assert_error( - config.get::>("foo").unwrap_err(), - "\ -could not load Cargo configuration - -Caused by: - failed to load TOML configuration from `[..]/.cargo/config` - -Caused by: - failed to parse key `foo` - -Caused by: - expected string but found integer in list", - ); -} - -#[cargo_test] -fn struct_with_opt_inner_struct() { - // Struct with a key that is Option of another struct. - // Check that can be defined with environment variable. - #[derive(Deserialize)] - struct Inner { - value: Option, - } - #[derive(Deserialize)] - struct Foo { - inner: Option, - } - let config = ConfigBuilder::new() - .env("CARGO_FOO_INNER_VALUE", "12") - .build(); - let f: Foo = config.get("foo").unwrap(); - assert_eq!(f.inner.unwrap().value.unwrap(), 12); -} - -#[cargo_test] -fn struct_with_default_inner_struct() { - // Struct with serde defaults. - // Check that can be defined with environment variable. - #[derive(Deserialize, Default)] - #[serde(default)] - struct Inner { - value: i32, - } - #[derive(Deserialize, Default)] - #[serde(default)] - struct Foo { - inner: Inner, - } - let config = ConfigBuilder::new() - .env("CARGO_FOO_INNER_VALUE", "12") - .build(); - let f: Foo = config.get("foo").unwrap(); - assert_eq!(f.inner.value, 12); -} - -#[cargo_test] -fn overlapping_env_config() { - // Issue where one key is a prefix of another. 
- #[derive(Deserialize)] - #[serde(rename_all = "kebab-case")] - struct Ambig { - debug: Option, - debug_assertions: Option, - } - let config = ConfigBuilder::new() - .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true") - .build(); - - let s: Ambig = config.get("ambig").unwrap(); - assert_eq!(s.debug_assertions, Some(true)); - assert_eq!(s.debug, None); - - let config = ConfigBuilder::new().env("CARGO_AMBIG_DEBUG", "0").build(); - let s: Ambig = config.get("ambig").unwrap(); - assert_eq!(s.debug_assertions, None); - assert_eq!(s.debug, Some(0)); - - let config = ConfigBuilder::new() - .env("CARGO_AMBIG_DEBUG", "1") - .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true") - .build(); - let s: Ambig = config.get("ambig").unwrap(); - assert_eq!(s.debug_assertions, Some(true)); - assert_eq!(s.debug, Some(1)); -} - -#[cargo_test] -fn overlapping_env_with_defaults_errors_out() { - // Issue where one key is a prefix of another. - // This is a limitation of mapping environment variables on to a hierarchy. - // Check that we error out when we hit ambiguity in this way, rather than - // the more-surprising defaulting through. - // If, in the future, we can handle this more correctly, feel free to delete - // this test. - #[derive(Deserialize, Default)] - #[serde(default, rename_all = "kebab-case")] - struct Ambig { - debug: u32, - debug_assertions: bool, - } - let config = ConfigBuilder::new() - .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true") - .build(); - let err = config.get::("ambig").err().unwrap(); - assert!(format!("{}", err).contains("missing config key `ambig.debug`")); - - let config = ConfigBuilder::new().env("CARGO_AMBIG_DEBUG", "5").build(); - let s: Ambig = config.get("ambig").unwrap(); - assert_eq!(s.debug_assertions, bool::default()); - assert_eq!(s.debug, 5); - - let config = ConfigBuilder::new() - .env("CARGO_AMBIG_DEBUG", "1") - .env("CARGO_AMBIG_DEBUG_ASSERTIONS", "true") - .build(); - let s: Ambig = config.get("ambig").unwrap(); - assert_eq!(s.debug_assertions, true); - assert_eq!(s.debug, 1); -} - -#[cargo_test] -fn struct_with_overlapping_inner_struct_and_defaults() { - // Struct with serde defaults. - // Check that can be defined with environment variable. - #[derive(Deserialize, Default)] - #[serde(default)] - struct Inner { - value: i32, - } - - // Containing struct with a prefix of inner - // - // This is a limitation of mapping environment variables on to a hierarchy. - // Check that we error out when we hit ambiguity in this way, rather than - // the more-surprising defaulting through. - // If, in the future, we can handle this more correctly, feel free to delete - // this case. - #[derive(Deserialize, Default)] - #[serde(default)] - struct PrefixContainer { - inn: bool, - inner: Inner, - } - let config = ConfigBuilder::new() - .env("CARGO_PREFIXCONTAINER_INNER_VALUE", "12") - .build(); - let err = config - .get::("prefixcontainer") - .err() - .unwrap(); - assert!(format!("{}", err).contains("missing config key `prefixcontainer.inn`")); - let config = ConfigBuilder::new() - .env("CARGO_PREFIXCONTAINER_INNER_VALUE", "12") - .env("CARGO_PREFIXCONTAINER_INN", "true") - .build(); - let f: PrefixContainer = config.get("prefixcontainer").unwrap(); - assert_eq!(f.inner.value, 12); - assert_eq!(f.inn, true); - - // Containing struct where the inner value's field is a prefix of another - // - // This is a limitation of mapping environment variables on to a hierarchy. - // Check that we error out when we hit ambiguity in this way, rather than - // the more-surprising defaulting through. 
- // If, in the future, we can handle this more correctly, feel free to delete - // this case. - #[derive(Deserialize, Default)] - #[serde(default)] - struct InversePrefixContainer { - inner_field: bool, - inner: Inner, - } - let config = ConfigBuilder::new() - .env("CARGO_INVERSEPREFIXCONTAINER_INNER_VALUE", "12") - .build(); - let f: InversePrefixContainer = config.get("inverseprefixcontainer").unwrap(); - assert_eq!(f.inner_field, bool::default()); - assert_eq!(f.inner.value, 12); -} - -#[cargo_test] -fn string_list_tricky_env() { - // Make sure StringList handles typed env values. - let config = ConfigBuilder::new() - .env("CARGO_KEY1", "123") - .env("CARGO_KEY2", "true") - .env("CARGO_KEY3", "1 2") - .build(); - let x = config.get::("key1").unwrap(); - assert_eq!(x.as_slice(), &["123".to_string()]); - let x = config.get::("key2").unwrap(); - assert_eq!(x.as_slice(), &["true".to_string()]); - let x = config.get::("key3").unwrap(); - assert_eq!(x.as_slice(), &["1".to_string(), "2".to_string()]); -} - -#[cargo_test] -fn string_list_wrong_type() { - // What happens if StringList is given then wrong type. - write_config("some_list = 123"); - let config = ConfigBuilder::new().build(); - assert_error( - config.get::("some_list").unwrap_err(), - "\ -invalid configuration for key `some_list` -expected a string or array of strings, but found a integer for `some_list` in [..]/.cargo/config", - ); - - write_config("some_list = \"1 2\""); - let config = ConfigBuilder::new().build(); - let x = config.get::("some_list").unwrap(); - assert_eq!(x.as_slice(), &["1".to_string(), "2".to_string()]); -} - -#[cargo_test] -fn string_list_advanced_env() { - // StringList with advanced env. - let config = ConfigBuilder::new() - .unstable_flag("advanced-env") - .env("CARGO_KEY1", "[]") - .env("CARGO_KEY2", "['1 2', '3']") - .env("CARGO_KEY3", "[123]") - .build(); - let x = config.get::("key1").unwrap(); - assert_eq!(x.as_slice(), &[] as &[String]); - let x = config.get::("key2").unwrap(); - assert_eq!(x.as_slice(), &["1 2".to_string(), "3".to_string()]); - assert_error( - config.get::("key3").unwrap_err(), - "error in environment variable `CARGO_KEY3`: expected string, found integer", - ); -} - -#[cargo_test] -fn parse_strip_with_string() { - write_config( - "\ -[profile.release] -strip = 'debuginfo' -", - ); - - let config = new_config(); - - let p: toml::TomlProfile = config.get("profile.release").unwrap(); - let strip = p.strip.unwrap(); - assert_eq!(strip, toml::StringOrBool::String("debuginfo".to_string())); -} - -#[cargo_test] -fn cargo_target_empty_cfg() { - write_config( - "\ -[build] -target-dir = '' -", - ); - - let config = new_config(); - - assert_error( - config.target_dir().unwrap_err(), - "the target directory is set to an empty string in [..]/.cargo/config", - ); -} - -#[cargo_test] -fn cargo_target_empty_env() { - let project = project().build(); - - project.cargo("build") - .env("CARGO_TARGET_DIR", "") - .with_stderr("error: the target directory is set to an empty string in the `CARGO_TARGET_DIR` environment variable") - .with_status(101) - .run() -} - -#[cargo_test] -fn all_profile_options() { - // Check that all profile options can be serialized/deserialized. 
- let base_settings = toml::TomlProfile { - opt_level: Some(toml::TomlOptLevel("0".to_string())), - lto: Some(toml::StringOrBool::String("thin".to_string())), - codegen_backend: Some(InternedString::new("example")), - codegen_units: Some(123), - debug: Some(toml::U32OrBool::U32(1)), - split_debuginfo: Some("packed".to_string()), - debug_assertions: Some(true), - rpath: Some(true), - panic: Some("abort".to_string()), - overflow_checks: Some(true), - incremental: Some(true), - dir_name: Some(InternedString::new("dir_name")), - inherits: Some(InternedString::new("debug")), - strip: Some(toml::StringOrBool::String("symbols".to_string())), - package: None, - build_override: None, - }; - let mut overrides = BTreeMap::new(); - let key = toml::ProfilePackageSpec::Spec(PackageIdSpec::parse("foo").unwrap()); - overrides.insert(key, base_settings.clone()); - let profile = toml::TomlProfile { - build_override: Some(Box::new(base_settings.clone())), - package: Some(overrides), - ..base_settings - }; - let profile_toml = ::toml::to_string(&profile).unwrap(); - let roundtrip: toml::TomlProfile = ::toml::from_str(&profile_toml).unwrap(); - let roundtrip_toml = ::toml::to_string(&roundtrip).unwrap(); - compare::assert_match_exact(&profile_toml, &roundtrip_toml); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/config_cli.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/config_cli.rs deleted file mode 100644 index 745525012..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/config_cli.rs +++ /dev/null @@ -1,350 +0,0 @@ -//! Tests for the --config CLI option. - -use super::config::{assert_error, assert_match, read_output, write_config, ConfigBuilder}; -use cargo::util::config::Definition; -use cargo_test_support::{paths, project}; -use std::fs; - -#[cargo_test] -fn config_gated() { - // Requires -Zunstable-options - let p = project().file("src/lib.rs", "").build(); - - p.cargo("build --config --config build.jobs=1") - .with_status(101) - .with_stderr( - "\ -[ERROR] the `--config` flag is unstable, [..] -See [..] -See [..] -", - ) - .run(); -} - -#[cargo_test] -fn basic() { - // Simple example. - let config = ConfigBuilder::new().config_arg("foo='bar'").build(); - assert_eq!(config.get::("foo").unwrap(), "bar"); -} - -#[cargo_test] -fn cli_priority() { - // Command line takes priority over files and env vars. - write_config( - " - demo_list = ['a'] - [build] - jobs = 3 - rustc = 'file' - [term] - quiet = false - verbose = false - ", - ); - let config = ConfigBuilder::new().build(); - assert_eq!(config.get::("build.jobs").unwrap(), 3); - assert_eq!(config.get::("build.rustc").unwrap(), "file"); - assert_eq!(config.get::("term.quiet").unwrap(), false); - assert_eq!(config.get::("term.verbose").unwrap(), false); - - let config = ConfigBuilder::new() - .env("CARGO_BUILD_JOBS", "2") - .env("CARGO_BUILD_RUSTC", "env") - .env("CARGO_TERM_VERBOSE", "false") - .config_arg("build.jobs=1") - .config_arg("build.rustc='cli'") - .config_arg("term.verbose=true") - .build(); - assert_eq!(config.get::("build.jobs").unwrap(), 1); - assert_eq!(config.get::("build.rustc").unwrap(), "cli"); - assert_eq!(config.get::("term.verbose").unwrap(), true); - - // Setting both term.verbose and term.quiet is invalid and is tested - // in the run test suite. 
- let config = ConfigBuilder::new() - .env("CARGO_TERM_QUIET", "false") - .config_arg("term.quiet=true") - .build(); - assert_eq!(config.get::("term.quiet").unwrap(), true); -} - -#[cargo_test] -fn merges_array() { - // Array entries are appended. - write_config( - " - [build] - rustflags = ['--file'] - ", - ); - let config = ConfigBuilder::new() - .config_arg("build.rustflags = ['--cli']") - .build(); - assert_eq!( - config.get::>("build.rustflags").unwrap(), - ["--file", "--cli"] - ); - - // With normal env. - let config = ConfigBuilder::new() - .env("CARGO_BUILD_RUSTFLAGS", "--env1 --env2") - .config_arg("build.rustflags = ['--cli']") - .build(); - // The order of cli/env is a little questionable here, but would require - // much more complex merging logic. - assert_eq!( - config.get::>("build.rustflags").unwrap(), - ["--file", "--cli", "--env1", "--env2"] - ); - - // With advanced-env. - let config = ConfigBuilder::new() - .unstable_flag("advanced-env") - .env("CARGO_BUILD_RUSTFLAGS", "--env") - .config_arg("build.rustflags = ['--cli']") - .build(); - assert_eq!( - config.get::>("build.rustflags").unwrap(), - ["--file", "--cli", "--env"] - ); - - // Merges multiple instances. - let config = ConfigBuilder::new() - .config_arg("build.rustflags=['--one']") - .config_arg("build.rustflags=['--two']") - .build(); - assert_eq!( - config.get::>("build.rustflags").unwrap(), - ["--file", "--one", "--two"] - ); -} - -#[cargo_test] -fn string_list_array() { - // Using the StringList type. - write_config( - " - [build] - rustflags = ['--file'] - ", - ); - let config = ConfigBuilder::new() - .config_arg("build.rustflags = ['--cli']") - .build(); - assert_eq!( - config - .get::("build.rustflags") - .unwrap() - .as_slice(), - ["--file", "--cli"] - ); - - // With normal env. - let config = ConfigBuilder::new() - .env("CARGO_BUILD_RUSTFLAGS", "--env1 --env2") - .config_arg("build.rustflags = ['--cli']") - .build(); - assert_eq!( - config - .get::("build.rustflags") - .unwrap() - .as_slice(), - ["--file", "--cli", "--env1", "--env2"] - ); - - // With advanced-env. - let config = ConfigBuilder::new() - .unstable_flag("advanced-env") - .env("CARGO_BUILD_RUSTFLAGS", "['--env']") - .config_arg("build.rustflags = ['--cli']") - .build(); - assert_eq!( - config - .get::("build.rustflags") - .unwrap() - .as_slice(), - ["--file", "--cli", "--env"] - ); -} - -#[cargo_test] -fn merges_table() { - // Tables are merged. - write_config( - " - [foo] - key1 = 1 - key2 = 2 - key3 = 3 - ", - ); - let config = ConfigBuilder::new() - .config_arg("foo.key2 = 4") - .config_arg("foo.key3 = 5") - .config_arg("foo.key4 = 6") - .build(); - assert_eq!(config.get::("foo.key1").unwrap(), 1); - assert_eq!(config.get::("foo.key2").unwrap(), 4); - assert_eq!(config.get::("foo.key3").unwrap(), 5); - assert_eq!(config.get::("foo.key4").unwrap(), 6); - - // With env. - let config = ConfigBuilder::new() - .env("CARGO_FOO_KEY3", "7") - .env("CARGO_FOO_KEY4", "8") - .env("CARGO_FOO_KEY5", "9") - .config_arg("foo.key2 = 4") - .config_arg("foo.key3 = 5") - .config_arg("foo.key4 = 6") - .build(); - assert_eq!(config.get::("foo.key1").unwrap(), 1); - assert_eq!(config.get::("foo.key2").unwrap(), 4); - assert_eq!(config.get::("foo.key3").unwrap(), 5); - assert_eq!(config.get::("foo.key4").unwrap(), 6); - assert_eq!(config.get::("foo.key5").unwrap(), 9); -} - -#[cargo_test] -fn merge_array_mixed_def_paths() { - // Merging of arrays with different def sites. 
- write_config( - " - paths = ['file'] - ", - ); - // Create a directory for CWD to differentiate the paths. - let somedir = paths::root().join("somedir"); - fs::create_dir(&somedir).unwrap(); - let config = ConfigBuilder::new() - .cwd(&somedir) - .config_arg("paths=['cli']") - // env is currently ignored for get_list() - .env("CARGO_PATHS", "env") - .build(); - let paths = config.get_list("paths").unwrap().unwrap(); - // The definition for the root value is somewhat arbitrary, but currently starts with the file because that is what is loaded first. - assert_eq!(paths.definition, Definition::Path(paths::root())); - assert_eq!(paths.val.len(), 2); - assert_eq!(paths.val[0].0, "file"); - assert_eq!(paths.val[0].1.root(&config), paths::root()); - assert_eq!(paths.val[1].0, "cli"); - assert_eq!(paths.val[1].1.root(&config), somedir); -} - -#[cargo_test] -fn unused_key() { - // Unused key passed on command line. - let config = ConfigBuilder::new() - .config_arg("build={jobs=1, unused=2}") - .build(); - - config.build_config().unwrap(); - let output = read_output(config); - let expected = "\ -warning: unused config key `build.unused` in `--config cli option` -"; - assert_match(expected, &output); -} - -#[cargo_test] -fn rerooted_remains() { - // Re-rooting keeps cli args. - let somedir = paths::root().join("somedir"); - fs::create_dir_all(somedir.join(".cargo")).unwrap(); - fs::write( - somedir.join(".cargo").join("config"), - " - a = 'file1' - b = 'file2' - ", - ) - .unwrap(); - let mut config = ConfigBuilder::new() - .cwd(&somedir) - .config_arg("b='cli1'") - .config_arg("c='cli2'") - .build(); - assert_eq!(config.get::("a").unwrap(), "file1"); - assert_eq!(config.get::("b").unwrap(), "cli1"); - assert_eq!(config.get::("c").unwrap(), "cli2"); - - config.reload_rooted_at(paths::root()).unwrap(); - - assert_eq!(config.get::>("a").unwrap(), None); - assert_eq!(config.get::("b").unwrap(), "cli1"); - assert_eq!(config.get::("c").unwrap(), "cli2"); -} - -#[cargo_test] -fn bad_parse() { - // Fail to TOML parse. - let config = ConfigBuilder::new().config_arg("abc").build_err(); - assert_error( - config.unwrap_err(), - "\ -failed to parse --config argument `abc` - -Caused by: - expected an equals, found eof at line 1 column 4", - ); -} - -#[cargo_test] -fn too_many_values() { - // Currently restricted to only 1 value. - let config = ConfigBuilder::new().config_arg("a=1\nb=2").build_err(); - assert_error( - config.unwrap_err(), - "\ ---config argument `a=1 -b=2` expected exactly one key=value pair, got 2 keys", - ); - - let config = ConfigBuilder::new().config_arg("").build_err(); - assert_error( - config.unwrap_err(), - "\ - --config argument `` expected exactly one key=value pair, got 0 keys", - ); -} - -#[cargo_test] -fn bad_cv_convert() { - // ConfigValue does not support all TOML types. - let config = ConfigBuilder::new().config_arg("a=2019-12-01").build_err(); - assert_error( - config.unwrap_err(), - "\ -failed to convert --config argument `a=2019-12-01` - -Caused by: - failed to parse key `a` - -Caused by: - found TOML configuration value of unknown type `datetime`", - ); -} - -#[cargo_test] -fn fail_to_merge_multiple_args() { - // Error message when multiple args fail to merge. - let config = ConfigBuilder::new() - .config_arg("foo='a'") - .config_arg("foo=['a']") - .build_err(); - // This is a little repetitive, but hopefully the user can figure it out. 
- assert_error( - config.unwrap_err(), - "\ -failed to merge --config argument `foo=['a']` - -Caused by: - failed to merge key `foo` between --config cli option and --config cli option - -Caused by: - failed to merge config value from `--config cli option` into `--config cli option`: \ - expected string, but found array", - ); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/config_include.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/config_include.rs deleted file mode 100644 index aff2a78af..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/config_include.rs +++ /dev/null @@ -1,282 +0,0 @@ -//! Tests for `include` config field. - -use super::config::{assert_error, write_config, write_config_at, ConfigBuilder}; -use cargo_test_support::{no_such_file_err_msg, paths, project}; -use std::fs; - -#[cargo_test] -fn gated() { - // Requires -Z flag. - write_config("include='other'"); - write_config_at( - ".cargo/other", - " - othervalue = 1 - ", - ); - let config = ConfigBuilder::new().build(); - assert_eq!(config.get::>("othervalue").unwrap(), None); - let config = ConfigBuilder::new().unstable_flag("config-include").build(); - assert_eq!(config.get::("othervalue").unwrap(), 1); -} - -#[cargo_test] -fn simple() { - // Simple test. - write_config_at( - ".cargo/config", - " - include = 'other' - key1 = 1 - key2 = 2 - ", - ); - write_config_at( - ".cargo/other", - " - key2 = 3 - key3 = 4 - ", - ); - let config = ConfigBuilder::new().unstable_flag("config-include").build(); - assert_eq!(config.get::("key1").unwrap(), 1); - assert_eq!(config.get::("key2").unwrap(), 2); - assert_eq!(config.get::("key3").unwrap(), 4); -} - -#[cargo_test] -fn works_with_cli() { - write_config_at( - ".cargo/config.toml", - " - include = 'other.toml' - [build] - rustflags = ['-W', 'unused'] - ", - ); - write_config_at( - ".cargo/other.toml", - " - [build] - rustflags = ['-W', 'unsafe-code'] - ", - ); - let p = project().file("src/lib.rs", "").build(); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 [..] -[RUNNING] `rustc [..]-W unused` -[FINISHED] [..] -", - ) - .run(); - p.cargo("build -v -Z config-include") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] foo v0.0.1 [..] -[RUNNING] `rustc [..]-W unsafe-code -W unused` -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn left_to_right() { - // How it merges multiple includes. - write_config_at( - ".cargo/config", - " - include = ['one', 'two'] - primary = 1 - ", - ); - write_config_at( - ".cargo/one", - " - one = 1 - primary = 2 - ", - ); - write_config_at( - ".cargo/two", - " - two = 2 - primary = 3 - ", - ); - let config = ConfigBuilder::new().unstable_flag("config-include").build(); - assert_eq!(config.get::("primary").unwrap(), 1); - assert_eq!(config.get::("one").unwrap(), 1); - assert_eq!(config.get::("two").unwrap(), 2); -} - -#[cargo_test] -fn missing_file() { - // Error when there's a missing file. - write_config("include='missing'"); - let config = ConfigBuilder::new() - .unstable_flag("config-include") - .build_err(); - assert_error( - config.unwrap_err(), - &format!( - "\ -could not load Cargo configuration - -Caused by: - failed to load config include `missing` from `[..]/.cargo/config` - -Caused by: - failed to read configuration file `[..]/.cargo/missing` - -Caused by: - {}", - no_such_file_err_msg() - ), - ); -} - -#[cargo_test] -fn cycle() { - // Detects a cycle. 
- write_config_at(".cargo/config", "include='one'"); - write_config_at(".cargo/one", "include='two'"); - write_config_at(".cargo/two", "include='config'"); - let config = ConfigBuilder::new() - .unstable_flag("config-include") - .build_err(); - assert_error( - config.unwrap_err(), - "\ -could not load Cargo configuration - -Caused by: - failed to load config include `one` from `[..]/.cargo/config` - -Caused by: - failed to load config include `two` from `[..]/.cargo/one` - -Caused by: - failed to load config include `config` from `[..]/.cargo/two` - -Caused by: - config `include` cycle detected with path `[..]/.cargo/config`", - ); -} - -#[cargo_test] -fn cli_include() { - // Using --config with include. - // CLI takes priority over files. - write_config_at( - ".cargo/config", - " - foo = 1 - bar = 2 - ", - ); - write_config_at(".cargo/config-foo", "foo = 2"); - let config = ConfigBuilder::new() - .unstable_flag("config-include") - .config_arg("include='.cargo/config-foo'") - .build(); - assert_eq!(config.get::("foo").unwrap(), 2); - assert_eq!(config.get::("bar").unwrap(), 2); -} - -#[cargo_test] -fn bad_format() { - // Not a valid format. - write_config("include = 1"); - let config = ConfigBuilder::new() - .unstable_flag("config-include") - .build_err(); - assert_error( - config.unwrap_err(), - "\ -could not load Cargo configuration - -Caused by: - `include` expected a string or list, but found integer in `[..]/.cargo/config`", - ); -} - -#[cargo_test] -fn cli_include_failed() { - // Error message when CLI include fails to load. - let config = ConfigBuilder::new() - .unstable_flag("config-include") - .config_arg("include='foobar'") - .build_err(); - assert_error( - config.unwrap_err(), - &format!( - "\ -failed to load --config include - -Caused by: - failed to load config include `foobar` from `--config cli option` - -Caused by: - failed to read configuration file `[..]/foobar` - -Caused by: - {}", - no_such_file_err_msg() - ), - ); -} - -#[cargo_test] -fn cli_merge_failed() { - // Error message when CLI include merge fails. - write_config("foo = ['a']"); - write_config_at( - ".cargo/other", - " - foo = 'b' - ", - ); - let config = ConfigBuilder::new() - .unstable_flag("config-include") - .config_arg("include='.cargo/other'") - .build_err(); - // Maybe this error message should mention it was from an include file? - assert_error( - config.unwrap_err(), - "\ -failed to merge --config key `foo` into `[..]/.cargo/config` - -Caused by: - failed to merge config value from `[..]/.cargo/other` into `[..]/.cargo/config`: \ - expected array, but found string", - ); -} - -#[cargo_test] -fn cli_path() { - // --config path_to_file - fs::write(paths::root().join("myconfig.toml"), "key = 123").unwrap(); - let config = ConfigBuilder::new() - .cwd(paths::root()) - .unstable_flag("config-include") - .config_arg("myconfig.toml") - .build(); - assert_eq!(config.get::("key").unwrap(), 123); - - let config = ConfigBuilder::new() - .unstable_flag("config-include") - .config_arg("missing.toml") - .build_err(); - assert_error( - config.unwrap_err(), - "\ -failed to parse --config argument `missing.toml` - -Caused by: - expected an equals, found eof at line 1 column 13", - ); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/corrupt_git.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/corrupt_git.rs deleted file mode 100644 index 066fb7a66..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/corrupt_git.rs +++ /dev/null @@ -1,159 +0,0 @@ -//! 
Tests for corrupt git repos. - -use cargo_test_support::paths; -use cargo_test_support::{basic_manifest, git, project}; -use cargo_util::paths as cargopaths; -use std::fs; -use std::path::{Path, PathBuf}; - -#[cargo_test] -fn deleting_database_files() { - let project = project(); - let git_project = git::new("bar", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("src/lib.rs", "") - }); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - bar = {{ git = '{}' }} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - project.cargo("build").run(); - - let mut files = Vec::new(); - find_files(&paths::home().join(".cargo/git/db"), &mut files); - assert!(!files.is_empty()); - - let log = "cargo::sources::git=trace"; - for file in files { - if !file.exists() { - continue; - } - println!("deleting {}", file.display()); - cargopaths::remove_file(&file).unwrap(); - project.cargo("build -v").env("CARGO_LOG", log).run(); - - if !file.exists() { - continue; - } - println!("truncating {}", file.display()); - make_writable(&file); - fs::OpenOptions::new() - .write(true) - .open(&file) - .unwrap() - .set_len(2) - .unwrap(); - project.cargo("build -v").env("CARGO_LOG", log).run(); - } -} - -#[cargo_test] -fn deleting_checkout_files() { - let project = project(); - let git_project = git::new("bar", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("src/lib.rs", "") - }); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - bar = {{ git = '{}' }} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - project.cargo("build").run(); - - let dir = paths::home() - .join(".cargo/git/checkouts") - // get the first entry in the checkouts dir for the package's location - .read_dir() - .unwrap() - .next() - .unwrap() - .unwrap() - .path() - // get the first child of that checkout dir for our checkout - .read_dir() - .unwrap() - .next() - .unwrap() - .unwrap() - .path() - // and throw on .git to corrupt things - .join(".git"); - let mut files = Vec::new(); - find_files(&dir, &mut files); - assert!(!files.is_empty()); - - let log = "cargo::sources::git=trace"; - for file in files { - if !file.exists() { - continue; - } - println!("deleting {}", file.display()); - cargopaths::remove_file(&file).unwrap(); - project.cargo("build -v").env("CARGO_LOG", log).run(); - - if !file.exists() { - continue; - } - println!("truncating {}", file.display()); - make_writable(&file); - fs::OpenOptions::new() - .write(true) - .open(&file) - .unwrap() - .set_len(2) - .unwrap(); - project.cargo("build -v").env("CARGO_LOG", log).run(); - } -} - -fn make_writable(path: &Path) { - let mut p = path.metadata().unwrap().permissions(); - p.set_readonly(false); - fs::set_permissions(path, p).unwrap(); -} - -fn find_files(path: &Path, dst: &mut Vec) { - for e in path.read_dir().unwrap() { - let e = e.unwrap(); - let path = e.path(); - if e.file_type().unwrap().is_dir() { - find_files(&path, dst); - } else { - dst.push(path); - } - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/credential_process.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/credential_process.rs deleted file mode 100644 index fb24533b5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/credential_process.rs 
+++ /dev/null @@ -1,462 +0,0 @@ -//! Tests for credential-process. - -use cargo_test_support::{basic_manifest, cargo_process, paths, project, registry, Project}; -use std::fs; -use std::thread; - -fn toml_bin(proj: &Project, name: &str) -> String { - proj.bin(name).display().to_string().replace('\\', "\\\\") -} - -#[cargo_test] -fn gated() { - registry::RegistryBuilder::new() - .alternative(true) - .add_tokens(false) - .build(); - - let p = project() - .file( - ".cargo/config", - r#" - [registry] - credential-process = "false" - "#, - ) - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[ERROR] no upload token found, please run `cargo login` or pass `--token` -", - ) - .run(); - - p.change_file( - ".cargo/config", - r#" - [registry.alternative] - credential-process = "false" - "#, - ); - - p.cargo("publish --no-verify --registry alternative") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[ERROR] no upload token found, please run `cargo login` or pass `--token` -", - ) - .run(); -} - -#[cargo_test] -fn warn_both_token_and_process() { - // Specifying both credential-process and a token in config should issue a warning. - registry::RegistryBuilder::new() - .alternative(true) - .add_tokens(false) - .build(); - let p = project() - .file( - ".cargo/config", - r#" - [registries.alternative] - token = "sekrit" - credential-process = "false" - "#, - ) - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - description = "foo" - authors = [] - license = "MIT" - homepage = "https://example.com/" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify --registry alternative -Z credential-process") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] both `registries.alternative.token` and `registries.alternative.credential-process` \ -were specified in the config\n\ -Only one of these values may be set, remove one or the other to proceed. -", - ) - .run(); - - // Try with global credential-process, and registry-specific `token`. - // This should silently use the config token, and not run the "false" exe. - p.change_file( - ".cargo/config", - r#" - [registry] - credential-process = "false" - - [registries.alternative] - token = "sekrit" - "#, - ); - p.cargo("publish --no-verify --registry alternative -Z credential-process") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.1.0 [..] -[UPLOADING] foo v0.1.0 [..] -", - ) - .run(); -} - -/// Setup for a test that will issue a command that needs to fetch a token. -/// -/// This does the following: -/// -/// * Spawn a thread that will act as an API server. -/// * Create a simple credential-process that will generate a fake token. -/// * Create a simple `foo` project to run the test against. -/// * Configure the credential-process config. -/// -/// Returns a thread handle for the API server, the test should join it when -/// finished. Also returns the simple `foo` project to test against. -fn get_token_test() -> (Project, thread::JoinHandle<()>) { - // API server that checks that the token is included correctly. 
- let server = registry::RegistryBuilder::new() - .add_tokens(false) - .build_api_server(&|headers| { - assert!(headers - .iter() - .any(|header| header == "Authorization: sekrit")); - - (200, &r#"{"ok": true, "msg": "completed!"}"#) - }); - - // The credential process to use. - let cred_proj = project() - .at("cred_proj") - .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0")) - .file("src/main.rs", r#"fn main() { println!("sekrit"); } "#) - .build(); - cred_proj.cargo("build").run(); - - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [registries.alternative] - index = "{}" - credential-process = ["{}"] - "#, - registry::alt_registry_url(), - toml_bin(&cred_proj, "test-cred") - ), - ) - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - description = "foo" - authors = [] - license = "MIT" - homepage = "https://example.com/" - "#, - ) - .file("src/lib.rs", "") - .build(); - (p, server) -} - -#[cargo_test] -fn publish() { - // Checks that credential-process is used for `cargo publish`. - let (p, t) = get_token_test(); - - p.cargo("publish --no-verify --registry alternative -Z credential-process") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.1.0 [..] -[UPLOADING] foo v0.1.0 [..] -", - ) - .run(); - - t.join().ok().unwrap(); -} - -#[cargo_test] -fn basic_unsupported() { - // Non-action commands don't support login/logout. - registry::RegistryBuilder::new().add_tokens(false).build(); - cargo_util::paths::append( - &paths::home().join(".cargo/config"), - br#" - [registry] - credential-process = "false" - "#, - ) - .unwrap(); - - cargo_process("login -Z credential-process abcdefg") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[ERROR] credential process `false` cannot be used to log in, \ -the credential-process configuration value must pass the \ -`{action}` argument in the config to support this command -", - ) - .run(); - - cargo_process("logout -Z credential-process") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] credential process `false` cannot be used to log out, \ -the credential-process configuration value must pass the \ -`{action}` argument in the config to support this command -", - ) - .run(); -} - -#[cargo_test] -fn login() { - registry::init(); - // The credential process to use. - let cred_proj = project() - .at("cred_proj") - .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0")) - .file( - "src/main.rs", - &r#" - use std::io::Read; - - fn main() { - assert_eq!(std::env::var("CARGO_REGISTRY_NAME").unwrap(), "crates-io"); - assert_eq!(std::env::var("CARGO_REGISTRY_API_URL").unwrap(), "__API__"); - assert_eq!(std::env::args().skip(1).next().unwrap(), "store"); - let mut buffer = String::new(); - std::io::stdin().read_to_string(&mut buffer).unwrap(); - assert_eq!(buffer, "abcdefg\n"); - std::fs::write("token-store", buffer).unwrap(); - } - "# - .replace("__API__", ®istry::api_url().to_string()), - ) - .build(); - cred_proj.cargo("build").run(); - - cargo_util::paths::append( - &paths::home().join(".cargo/config"), - format!( - r#" - [registry] - credential-process = ["{}", "{{action}}"] - "#, - toml_bin(&cred_proj, "test-cred") - ) - .as_bytes(), - ) - .unwrap(); - - cargo_process("login -Z credential-process abcdefg") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] 
-[LOGIN] token for `crates.io` saved -", - ) - .run(); - assert_eq!( - fs::read_to_string(paths::root().join("token-store")).unwrap(), - "abcdefg\n" - ); -} - -#[cargo_test] -fn logout() { - registry::RegistryBuilder::new().add_tokens(false).build(); - // The credential process to use. - let cred_proj = project() - .at("cred_proj") - .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0")) - .file( - "src/main.rs", - r#" - use std::io::Read; - - fn main() { - assert_eq!(std::env::var("CARGO_REGISTRY_NAME").unwrap(), "crates-io"); - assert_eq!(std::env::args().skip(1).next().unwrap(), "erase"); - std::fs::write("token-store", "").unwrap(); - eprintln!("token for `{}` has been erased!", - std::env::var("CARGO_REGISTRY_NAME").unwrap()); - } - "#, - ) - .build(); - cred_proj.cargo("build").run(); - - cargo_util::paths::append( - &paths::home().join(".cargo/config"), - format!( - r#" - [registry] - credential-process = ["{}", "{{action}}"] - "#, - toml_bin(&cred_proj, "test-cred") - ) - .as_bytes(), - ) - .unwrap(); - - cargo_process("logout -Z credential-process") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -token for `crates-io` has been erased! -[LOGOUT] token for `crates.io` has been removed from local storage -", - ) - .run(); - assert_eq!( - fs::read_to_string(paths::root().join("token-store")).unwrap(), - "" - ); -} - -#[cargo_test] -fn yank() { - let (p, t) = get_token_test(); - - p.cargo("yank --vers 0.1.0 --registry alternative -Z credential-process") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[YANK] foo:0.1.0 -", - ) - .run(); - - t.join().ok().unwrap(); -} - -#[cargo_test] -fn owner() { - let (p, t) = get_token_test(); - - p.cargo("owner --add username --registry alternative -Z credential-process") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[OWNER] completed! -", - ) - .run(); - - t.join().ok().unwrap(); -} - -#[cargo_test] -fn libexec_path() { - // cargo: prefixed names use the sysroot - registry::RegistryBuilder::new().add_tokens(false).build(); - cargo_util::paths::append( - &paths::home().join(".cargo/config"), - br#" - [registry] - credential-process = "cargo:doesnotexist" - "#, - ) - .unwrap(); - - cargo_process("login -Z credential-process abcdefg") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - // FIXME: Update "Caused by" error message once rust/pull/87704 is merged. - // On Windows, changing to a custom executable resolver has changed the - // error messages. - &format!("\ -[UPDATING] [..] -[ERROR] failed to execute `[..]libexec/cargo-credential-doesnotexist[EXE]` to store authentication token for registry `crates-io` - -Caused by: - [..] -"), - ) - .run(); -} - -#[cargo_test] -fn invalid_token_output() { - // Error when credential process does not output the expected format for a token. 
- registry::RegistryBuilder::new() - .alternative(true) - .add_tokens(false) - .build(); - let cred_proj = project() - .at("cred_proj") - .file("Cargo.toml", &basic_manifest("test-cred", "1.0.0")) - .file("src/main.rs", r#"fn main() { print!("a\nb\n"); } "#) - .build(); - cred_proj.cargo("build").run(); - - cargo_util::paths::append( - &paths::home().join(".cargo/config"), - format!( - r#" - [registry] - credential-process = ["{}"] - "#, - toml_bin(&cred_proj, "test-cred") - ) - .as_bytes(), - ) - .unwrap(); - - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify --registry alternative -Z credential-process") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[ERROR] credential process `[..]test-cred[EXE]` returned more than one line of output; expected a single token -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cross_compile.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cross_compile.rs deleted file mode 100644 index 66d2177a3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cross_compile.rs +++ /dev/null @@ -1,1329 +0,0 @@ -//! Tests for cross compiling with --target. -//! -//! See `cargo_test_support::cross_compile` for more detail. - -use cargo_test_support::{basic_bin_manifest, basic_manifest, cross_compile, project}; -use cargo_test_support::{is_nightly, rustc_host}; - -#[cargo_test] -fn simple_cross() { - if cross_compile::disabled() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - &format!( - r#" - fn main() {{ - assert_eq!(std::env::var("TARGET").unwrap(), "{}"); - }} - "#, - cross_compile::alternate() - ), - ) - .file( - "src/main.rs", - &format!( - r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - let target = cross_compile::alternate(); - p.cargo("build -v --target").arg(&target).run(); - assert!(p.target_bin(target, "foo").is_file()); - - if cross_compile::can_run_on_host() { - p.process(&p.target_bin(target, "foo")).run(); - } -} - -#[cargo_test] -fn simple_cross_config() { - if cross_compile::disabled() { - return; - } - - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [build] - target = "{}" - "#, - cross_compile::alternate() - ), - ) - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - &format!( - r#" - fn main() {{ - assert_eq!(std::env::var("TARGET").unwrap(), "{}"); - }} - "#, - cross_compile::alternate() - ), - ) - .file( - "src/main.rs", - &format!( - r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - let target = cross_compile::alternate(); - p.cargo("build -v").run(); - assert!(p.target_bin(target, "foo").is_file()); - - if cross_compile::can_run_on_host() { - p.process(&p.target_bin(target, "foo")).run(); - } -} - -#[cargo_test] -fn simple_deps() { - if cross_compile::disabled() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() { 
bar::bar(); }") - .build(); - let _p2 = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let target = cross_compile::alternate(); - p.cargo("build --target").arg(&target).run(); - assert!(p.target_bin(target, "foo").is_file()); - - if cross_compile::can_run_on_host() { - p.process(&p.target_bin(target, "foo")).run(); - } -} - -/// Always take care of setting these so that -/// `cross_compile::alternate()` is the actually-picked target -fn per_crate_target_test( - default_target: Option<&'static str>, - forced_target: Option<&'static str>, - arg_target: Option<&'static str>, -) { - if cross_compile::disabled() { - return; - } - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - cargo-features = ["per-package-target"] - - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - {} - {} - "#, - default_target - .map(|t| format!(r#"default-target = "{}""#, t)) - .unwrap_or(String::new()), - forced_target - .map(|t| format!(r#"forced-target = "{}""#, t)) - .unwrap_or(String::new()), - ), - ) - .file( - "build.rs", - &format!( - r#" - fn main() {{ - assert_eq!(std::env::var("TARGET").unwrap(), "{}"); - }} - "#, - cross_compile::alternate() - ), - ) - .file( - "src/main.rs", - &format!( - r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - let mut cmd = p.cargo("build -v"); - if let Some(t) = arg_target { - cmd.arg("--target").arg(&t); - } - cmd.masquerade_as_nightly_cargo().run(); - assert!(p.target_bin(cross_compile::alternate(), "foo").is_file()); - - if cross_compile::can_run_on_host() { - p.process(&p.target_bin(cross_compile::alternate(), "foo")) - .run(); - } -} - -#[cargo_test] -fn per_crate_default_target_is_default() { - per_crate_target_test(Some(cross_compile::alternate()), None, None); -} - -#[cargo_test] -fn per_crate_default_target_gets_overridden() { - per_crate_target_test( - Some(cross_compile::unused()), - None, - Some(cross_compile::alternate()), - ); -} - -#[cargo_test] -fn per_crate_forced_target_is_default() { - per_crate_target_test(None, Some(cross_compile::alternate()), None); -} - -#[cargo_test] -fn per_crate_forced_target_does_not_get_overridden() { - per_crate_target_test( - None, - Some(cross_compile::alternate()), - Some(cross_compile::unused()), - ); -} - -#[cargo_test] -fn workspace_with_multiple_targets() { - if cross_compile::disabled() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["native", "cross"] - "#, - ) - .file( - "native/Cargo.toml", - r#" - cargo-features = ["per-package-target"] - - [package] - name = "native" - version = "0.0.0" - authors = [] - build = "build.rs" - "#, - ) - .file( - "native/build.rs", - &format!( - r#" - fn main() {{ - assert_eq!(std::env::var("TARGET").unwrap(), "{}"); - }} - "#, - cross_compile::native() - ), - ) - .file( - "native/src/main.rs", - &format!( - r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, - cross_compile::native_arch() - ), - ) - .file( - "cross/Cargo.toml", - &format!( - r#" - cargo-features = ["per-package-target"] - - [package] - name = "cross" - version = "0.0.0" - authors = [] - build = "build.rs" - default-target = "{}" - "#, - cross_compile::alternate(), - ), - ) - .file( - "cross/build.rs", - &format!( - r#" - fn main() {{ - assert_eq!(std::env::var("TARGET").unwrap(), "{}"); - }} - "#, - 
cross_compile::alternate() - ), - ) - .file( - "cross/src/main.rs", - &format!( - r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - let mut cmd = p.cargo("build -v"); - cmd.masquerade_as_nightly_cargo().run(); - - assert!(p.bin("native").is_file()); - assert!(p.target_bin(cross_compile::alternate(), "cross").is_file()); - - p.process(&p.bin("native")).run(); - if cross_compile::can_run_on_host() { - p.process(&p.target_bin(cross_compile::alternate(), "cross")) - .run(); - } -} - -#[cargo_test] -fn linker() { - if cross_compile::disabled() { - return; - } - - let target = cross_compile::alternate(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - linker = "my-linker-tool" - "#, - target - ), - ) - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/foo.rs", - &format!( - r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - p.cargo("build -v --target") - .arg(&target) - .with_status(101) - .with_stderr_contains(&format!( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc --crate-name foo src/foo.rs [..]--crate-type bin \ - --emit=[..]link[..]-C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [CWD]/target/{target}/debug/deps \ - --target {target} \ - -C linker=my-linker-tool \ - -L dependency=[CWD]/target/{target}/debug/deps \ - -L dependency=[CWD]/target/debug/deps` -", - target = target, - )) - .run(); -} - -#[cargo_test] -fn plugin_with_extra_dylib_dep() { - if cross_compile::disabled() { - return; - } - if !is_nightly() { - // plugins are unstable - return; - } - - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file( - "src/main.rs", - r#" - #![feature(plugin)] - #![plugin(bar)] - - fn main() {} - "#, - ) - .build(); - let _bar = project() - .at("bar") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - plugin = true - - [dependencies.baz] - path = "../baz" - "#, - ) - .file( - "src/lib.rs", - r#" - #![feature(rustc_private)] - - extern crate baz; - extern crate rustc_driver; - - use rustc_driver::plugin::Registry; - - #[no_mangle] - pub fn __rustc_plugin_registrar(reg: &mut Registry) { - println!("{}", baz::baz()); - } - "#, - ) - .build(); - let _baz = project() - .at("baz") - .file( - "Cargo.toml", - r#" - [package] - name = "baz" - version = "0.0.1" - authors = [] - - [lib] - name = "baz" - crate_type = ["dylib"] - "#, - ) - .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") - .build(); - - let target = cross_compile::alternate(); - foo.cargo("build --target").arg(&target).run(); -} - -#[cargo_test] -fn cross_tests() { - if !cross_compile::can_run_on_host() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - authors = [] - version = "0.0.0" - - [[bin]] - name = "bar" - "#, - ) - .file( - "src/bin/bar.rs", - &format!( - r#" - #[allow(unused_extern_crates)] - extern crate foo; - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - #[test] fn test() {{ main() }} - "#, - cross_compile::alternate_arch() - ), - ) - .file( - "src/lib.rs", - &format!( - r#" - use std::env; - pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }} - #[test] fn test_foo() {{ foo() }} - "#, - cross_compile::alternate_arch() - ), - ) 
- .build(); - - let target = cross_compile::alternate(); - p.cargo("test --target") - .arg(&target) - .with_stderr(&format!( - "\ -[COMPILING] foo v0.0.0 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/{triple}/debug/deps/foo-[..][EXE]) -[RUNNING] [..] (target/{triple}/debug/deps/bar-[..][EXE])", - triple = target - )) - .with_stdout_contains("test test_foo ... ok") - .with_stdout_contains("test test ... ok") - .run(); -} - -#[cargo_test] -fn no_cross_doctests() { - if cross_compile::disabled() { - return; - } - - let p = project() - .file( - "src/lib.rs", - r#" - //! ``` - //! extern crate foo; - //! assert!(true); - //! ``` - "#, - ) - .build(); - - let host_output = "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[DOCTEST] foo -"; - - println!("a"); - p.cargo("test").with_stderr(&host_output).run(); - - println!("b"); - let target = rustc_host(); - p.cargo("test --target") - .arg(&target) - .with_stderr(&format!( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/{triple}/debug/deps/foo-[..][EXE]) -[DOCTEST] foo -", - triple = target - )) - .run(); - - println!("c"); - let target = cross_compile::alternate(); - - // This will build the library, but does not build or run doc tests. - // This should probably be a warning or error. - p.cargo("test -v --doc --target") - .arg(&target) - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo [..] -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[NOTE] skipping doctests for foo v0.0.1 ([ROOT]/foo) (lib), \ -cross-compilation doctests are not yet supported -See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile \ -for more information. -", - ) - .run(); - - if !cross_compile::can_run_on_host() { - return; - } - - // This tests the library, but does not run the doc tests. - p.cargo("test -v --target") - .arg(&target) - .with_stderr(&format!( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo [..]--test[..] -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[CWD]/target/{triple}/debug/deps/foo-[..][EXE]` -[NOTE] skipping doctests for foo v0.0.1 ([ROOT]/foo) (lib), \ -cross-compilation doctests are not yet supported -See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#doctest-xcompile \ -for more information. 
-", - triple = target - )) - .run(); -} - -#[cargo_test] -fn simple_cargo_run() { - if !cross_compile::can_run_on_host() { - return; - } - - let p = project() - .file( - "src/main.rs", - &format!( - r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - let target = cross_compile::alternate(); - p.cargo("run --target").arg(&target).run(); -} - -#[cargo_test] -fn cross_with_a_build_script() { - if cross_compile::disabled() { - return; - } - - let target = cross_compile::alternate(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = 'build.rs' - "#, - ) - .file( - "build.rs", - &format!( - r#" - use std::env; - use std::path::PathBuf; - fn main() {{ - assert_eq!(env::var("TARGET").unwrap(), "{0}"); - let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); - assert_eq!(path.file_name().unwrap().to_str().unwrap(), "out"); - path.pop(); - assert!(path.file_name().unwrap().to_str().unwrap() - .starts_with("foo-")); - path.pop(); - assert_eq!(path.file_name().unwrap().to_str().unwrap(), "build"); - path.pop(); - assert_eq!(path.file_name().unwrap().to_str().unwrap(), "debug"); - path.pop(); - assert_eq!(path.file_name().unwrap().to_str().unwrap(), "{0}"); - path.pop(); - assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target"); - }} - "#, - target - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v --target") - .arg(&target) - .with_stderr(&format!( - "\ -[COMPILING] foo v0.0.0 ([CWD]) -[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..]` -[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build` -[RUNNING] `rustc [..] src/main.rs [..] --target {target} [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - target = target, - )) - .run(); -} - -#[cargo_test] -fn build_script_needed_for_host_and_target() { - if cross_compile::disabled() { - return; - } - - let target = cross_compile::alternate(); - let host = rustc_host(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = 'build.rs' - - [dependencies.d1] - path = "d1" - [build-dependencies.d2] - path = "d2" - "#, - ) - .file( - "build.rs", - r#" - #[allow(unused_extern_crates)] - extern crate d2; - fn main() { d2::d2(); } - "#, - ) - .file( - "src/main.rs", - " - #[allow(unused_extern_crates)] - extern crate d1; - fn main() { d1::d1(); } - ", - ) - .file( - "d1/Cargo.toml", - r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - build = 'build.rs' - "#, - ) - .file("d1/src/lib.rs", "pub fn d1() {}") - .file( - "d1/build.rs", - r#" - use std::env; - fn main() { - let target = env::var("TARGET").unwrap(); - println!("cargo:rustc-flags=-L /path/to/{}", target); - } - "#, - ) - .file( - "d2/Cargo.toml", - r#" - [package] - name = "d2" - version = "0.0.0" - authors = [] - - [dependencies.d1] - path = "../d1" - "#, - ) - .file( - "d2/src/lib.rs", - " - #[allow(unused_extern_crates)] - extern crate d1; - pub fn d2() { d1::d1(); } - ", - ) - .build(); - - p.cargo("build -v --target") - .arg(&target) - .with_stderr_contains(&"[COMPILING] d1 v0.0.0 ([CWD]/d1)") - .with_stderr_contains( - "[RUNNING] `rustc [..] d1/build.rs [..] --out-dir [CWD]/target/debug/build/d1-[..]`", - ) - .with_stderr_contains("[RUNNING] `[CWD]/target/debug/build/d1-[..]/build-script-build`") - .with_stderr_contains("[RUNNING] `rustc [..] 
d1/src/lib.rs [..]`") - .with_stderr_contains("[COMPILING] d2 v0.0.0 ([CWD]/d2)") - .with_stderr_contains(&format!( - "[RUNNING] `rustc [..] d2/src/lib.rs [..] -L /path/to/{host}`", - host = host - )) - .with_stderr_contains("[COMPILING] foo v0.0.0 ([CWD])") - .with_stderr_contains(&format!( - "[RUNNING] `rustc [..] build.rs [..] --out-dir [CWD]/target/debug/build/foo-[..] \ - -L /path/to/{host}`", - host = host - )) - .with_stderr_contains(&format!( - "[RUNNING] `rustc [..] src/main.rs [..] --target {target} [..] \ - -L /path/to/{target}`", - target = target - )) - .run(); -} - -#[cargo_test] -fn build_deps_for_the_right_arch() { - if cross_compile::disabled() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.d2] - path = "d2" - "#, - ) - .file("src/main.rs", "extern crate d2; fn main() {}") - .file("d1/Cargo.toml", &basic_manifest("d1", "0.0.0")) - .file("d1/src/lib.rs", "pub fn d1() {}") - .file( - "d2/Cargo.toml", - r#" - [package] - name = "d2" - version = "0.0.0" - authors = [] - build = "build.rs" - - [build-dependencies.d1] - path = "../d1" - "#, - ) - .file("d2/build.rs", "extern crate d1; fn main() {}") - .file("d2/src/lib.rs", "") - .build(); - - let target = cross_compile::alternate(); - p.cargo("build -v --target").arg(&target).run(); -} - -#[cargo_test] -fn build_script_only_host() { - if cross_compile::disabled() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - - [build-dependencies.d1] - path = "d1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("build.rs", "extern crate d1; fn main() {}") - .file( - "d1/Cargo.toml", - r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - build = "build.rs" - "#, - ) - .file("d1/src/lib.rs", "pub fn d1() {}") - .file( - "d1/build.rs", - r#" - use std::env; - - fn main() { - assert!(env::var("OUT_DIR").unwrap().replace("\\", "/") - .contains("target/debug/build/d1-"), - "bad: {:?}", env::var("OUT_DIR")); - } - "#, - ) - .build(); - - let target = cross_compile::alternate(); - p.cargo("build -v --target").arg(&target).run(); -} - -#[cargo_test] -fn plugin_build_script_right_arch() { - if cross_compile::disabled() { - return; - } - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [lib] - name = "foo" - plugin = true - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v --target") - .arg(cross_compile::alternate()) - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] build.rs [..]` -[RUNNING] `[..]/build-script-build` -[RUNNING] `rustc [..] src/lib.rs [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn build_script_with_platform_specific_dependencies() { - if cross_compile::disabled() { - return; - } - - let target = cross_compile::alternate(); - let host = rustc_host(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [build-dependencies.d1] - path = "d1" - "#, - ) - .file( - "build.rs", - " - #[allow(unused_extern_crates)] - extern crate d1; - fn main() {} - ", - ) - .file("src/lib.rs", "") - .file( - "d1/Cargo.toml", - &format!( - r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - - [target.{}.dependencies] - d2 = {{ path = "../d2" }} - "#, - host - ), - ) - .file( - "d1/src/lib.rs", - "#[allow(unused_extern_crates)] extern crate d2;", - ) - .file("d2/Cargo.toml", &basic_manifest("d2", "0.0.0")) - .file("d2/src/lib.rs", "") - .build(); - - p.cargo("build -v --target") - .arg(&target) - .with_stderr(&format!( - "\ -[COMPILING] d2 v0.0.0 ([..]) -[RUNNING] `rustc [..] d2/src/lib.rs [..]` -[COMPILING] d1 v0.0.0 ([..]) -[RUNNING] `rustc [..] d1/src/lib.rs [..]` -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] build.rs [..]` -[RUNNING] `[CWD]/target/debug/build/foo-[..]/build-script-build` -[RUNNING] `rustc [..] src/lib.rs [..] --target {target} [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - target = target - )) - .run(); -} - -#[cargo_test] -fn platform_specific_dependencies_do_not_leak() { - if cross_compile::disabled() { - return; - } - - let target = cross_compile::alternate(); - let host = rustc_host(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [dependencies.d1] - path = "d1" - - [build-dependencies.d1] - path = "d1" - "#, - ) - .file("build.rs", "extern crate d1; fn main() {}") - .file("src/lib.rs", "") - .file( - "d1/Cargo.toml", - &format!( - r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - - [target.{}.dependencies] - d2 = {{ path = "../d2" }} - "#, - host - ), - ) - .file("d1/src/lib.rs", "extern crate d2;") - .file("d1/Cargo.toml", &basic_manifest("d1", "0.0.0")) - .file("d2/src/lib.rs", "") - .build(); - - p.cargo("build -v --target") - .arg(&target) - .with_status(101) - .with_stderr_contains("[..] can't find crate for `d2`[..]") - .run(); -} - -#[cargo_test] -fn platform_specific_variables_reflected_in_build_scripts() { - if cross_compile::disabled() { - return; - } - - let target = cross_compile::alternate(); - let host = rustc_host(); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [target.{host}.dependencies] - d1 = {{ path = "d1" }} - - [target.{target}.dependencies] - d2 = {{ path = "d2" }} - "#, - host = host, - target = target - ), - ) - .file( - "build.rs", - &format!( - r#" - use std::env; - - fn main() {{ - let platform = env::var("TARGET").unwrap(); - let (expected, not_expected) = match &platform[..] 
{{ - "{host}" => ("DEP_D1_VAL", "DEP_D2_VAL"), - "{target}" => ("DEP_D2_VAL", "DEP_D1_VAL"), - _ => panic!("unknown platform") - }}; - - env::var(expected).ok() - .expect(&format!("missing {{}}", expected)); - env::var(not_expected).err() - .expect(&format!("found {{}}", not_expected)); - }} - "#, - host = host, - target = target - ), - ) - .file("src/lib.rs", "") - .file( - "d1/Cargo.toml", - r#" - [package] - name = "d1" - version = "0.0.0" - authors = [] - links = "d1" - build = "build.rs" - "#, - ) - .file("d1/build.rs", r#"fn main() { println!("cargo:val=1") }"#) - .file("d1/src/lib.rs", "") - .file( - "d2/Cargo.toml", - r#" - [package] - name = "d2" - version = "0.0.0" - authors = [] - links = "d2" - build = "build.rs" - "#, - ) - .file("d2/build.rs", r#"fn main() { println!("cargo:val=1") }"#) - .file("d2/src/lib.rs", "") - .build(); - - p.cargo("build -v").run(); - p.cargo("build -v --target").arg(&target).run(); -} - -#[cargo_test] -// Don't have a dylib cross target on macos. -#[cfg_attr(target_os = "macos", ignore)] -fn cross_test_dylib() { - if cross_compile::disabled() { - return; - } - - let target = cross_compile::alternate(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate_type = ["dylib"] - - [dependencies.bar] - path = "bar" - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate bar as the_bar; - - pub fn bar() { the_bar::baz(); } - - #[test] - fn foo() { bar(); } - "#, - ) - .file( - "tests/test.rs", - r#" - extern crate foo as the_foo; - - #[test] - fn foo() { the_foo::bar(); } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - crate_type = ["dylib"] - "#, - ) - .file( - "bar/src/lib.rs", - &format!( - r#" - use std::env; - pub fn baz() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - p.cargo("test --target") - .arg(&target) - .with_stderr(&format!( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/{arch}/debug/deps/foo-[..][EXE]) -[RUNNING] [..] (target/{arch}/debug/deps/test-[..][EXE])", - arch = cross_compile::alternate() - )) - .with_stdout_contains_n("test foo ... ok", 2) - .run(); -} - -#[cargo_test] -fn doctest_xcompile_linker() { - if cross_compile::disabled() { - return; - } - if !is_nightly() { - // -Zdoctest-xcompile is unstable - return; - } - - let target = cross_compile::alternate(); - let p = project() - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - linker = "my-linker-tool" - "#, - target - ), - ) - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file( - "src/lib.rs", - r#" - /// ``` - /// assert_eq!(1, 1); - /// ``` - pub fn foo() {} - "#, - ) - .build(); - - // Fails because `my-linker-tool` doesn't actually exist. - p.cargo("test --doc -v -Zdoctest-xcompile --target") - .arg(&target) - .with_status(101) - .masquerade_as_nightly_cargo() - .with_stderr_contains(&format!( - "\ -[RUNNING] `rustdoc --crate-type lib --crate-name foo --test [..]\ - --target {target} [..] -C linker=my-linker-tool[..] 
-", - target = target, - )) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cross_publish.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cross_publish.rs deleted file mode 100644 index c2f08bc40..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/cross_publish.rs +++ /dev/null @@ -1,114 +0,0 @@ -//! Tests for publishing using the `--target` flag. - -use std::fs::File; - -use cargo_test_support::{cross_compile, project, publish, registry}; - -#[cargo_test] -fn simple_cross_package() { - if cross_compile::disabled() { - return; - } - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - license = "MIT" - description = "foo" - repository = "bar" - "#, - ) - .file( - "src/main.rs", - &format!( - r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - let target = cross_compile::alternate(); - - p.cargo("package --target") - .arg(&target) - .with_stderr( - "\ -[PACKAGING] foo v0.0.0 ([CWD]) -[VERIFYING] foo v0.0.0 ([CWD]) -[COMPILING] foo v0.0.0 ([CWD]/target/package/foo-0.0.0) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - // Check that the tarball contains the files - let f = File::open(&p.root().join("target/package/foo-0.0.0.crate")).unwrap(); - publish::validate_crate_contents( - f, - "foo-0.0.0.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - &[], - ); -} - -#[cargo_test] -fn publish_with_target() { - if cross_compile::disabled() { - return; - } - - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - license = "MIT" - description = "foo" - repository = "bar" - "#, - ) - .file( - "src/main.rs", - &format!( - r#" - use std::env; - fn main() {{ - assert_eq!(env::consts::ARCH, "{}"); - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - let target = cross_compile::alternate(); - - p.cargo("publish --token sekrit") - .arg("--target") - .arg(&target) - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[PACKAGING] foo v0.0.0 ([CWD]) -[VERIFYING] foo v0.0.0 ([CWD]) -[COMPILING] foo v0.0.0 ([CWD]/target/package/foo-0.0.0) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[UPLOADING] foo v0.0.0 ([CWD]) -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/custom_target.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/custom_target.rs deleted file mode 100644 index 8b4c9ac3d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/custom_target.rs +++ /dev/null @@ -1,236 +0,0 @@ -//! Tests for custom json target specifications. - -use cargo_test_support::is_nightly; -use cargo_test_support::{basic_manifest, project}; -use std::fs; - -const MINIMAL_LIB: &str = r#" -#![feature(no_core)] -#![feature(lang_items)] -#![no_core] - -#[lang = "sized"] -pub trait Sized { - // Empty. -} -#[lang = "copy"] -pub trait Copy { - // Empty. 
-} -"#; - -const SIMPLE_SPEC: &str = r#" -{ - "llvm-target": "x86_64-unknown-none-gnu", - "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128", - "arch": "x86_64", - "target-endian": "little", - "target-pointer-width": "64", - "target-c-int-width": "32", - "os": "none", - "linker-flavor": "ld.lld", - "linker": "rust-lld", - "executables": true -} -"#; - -#[cargo_test] -fn custom_target_minimal() { - if !is_nightly() { - // Requires features no_core, lang_items - return; - } - let p = project() - .file( - "src/lib.rs", - &" - __MINIMAL_LIB__ - - pub fn foo() -> u32 { - 42 - } - " - .replace("__MINIMAL_LIB__", MINIMAL_LIB), - ) - .file("custom-target.json", SIMPLE_SPEC) - .build(); - - p.cargo("build --lib --target custom-target.json -v").run(); - p.cargo("build --lib --target src/../custom-target.json -v") - .run(); - - // Ensure that the correct style of flag is passed to --target with doc tests. - p.cargo("test --doc --target src/../custom-target.json -v -Zdoctest-xcompile") - .masquerade_as_nightly_cargo() - .with_stderr_contains("[RUNNING] `rustdoc [..]--target [..]foo/custom-target.json[..]") - .run(); -} - -#[cargo_test] -fn custom_target_dependency() { - if !is_nightly() { - // Requires features no_core, lang_items, auto_traits - return; - } - let p = project() - .file( - "Cargo.toml", - r#" - [package] - - name = "foo" - version = "0.0.1" - authors = ["author@example.com"] - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file( - "src/lib.rs", - r#" - #![feature(no_core)] - #![feature(lang_items)] - #![feature(auto_traits)] - #![no_core] - - extern crate bar; - - pub fn foo() -> u32 { - bar::bar() - } - - #[lang = "freeze"] - unsafe auto trait Freeze {} - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file( - "bar/src/lib.rs", - &" - __MINIMAL_LIB__ - - pub fn bar() -> u32 { - 42 - } - " - .replace("__MINIMAL_LIB__", MINIMAL_LIB), - ) - .file("custom-target.json", SIMPLE_SPEC) - .build(); - - p.cargo("build --lib --target custom-target.json -v").run(); -} - -#[cargo_test] -fn custom_bin_target() { - if !is_nightly() { - // Requires features no_core, lang_items - return; - } - let p = project() - .file( - "src/main.rs", - &" - #![no_main] - __MINIMAL_LIB__ - " - .replace("__MINIMAL_LIB__", MINIMAL_LIB), - ) - .file("custom-bin-target.json", SIMPLE_SPEC) - .build(); - - p.cargo("build --target custom-bin-target.json -v").run(); -} - -#[cargo_test] -fn changing_spec_rebuilds() { - // Changing the .json file will trigger a rebuild. - if !is_nightly() { - // Requires features no_core, lang_items - return; - } - let p = project() - .file( - "src/lib.rs", - &" - __MINIMAL_LIB__ - - pub fn foo() -> u32 { - 42 - } - " - .replace("__MINIMAL_LIB__", MINIMAL_LIB), - ) - .file("custom-target.json", SIMPLE_SPEC) - .build(); - - p.cargo("build --lib --target custom-target.json -v").run(); - p.cargo("build --lib --target custom-target.json -v") - .with_stderr( - "\ -[FRESH] foo [..] -[FINISHED] [..] -", - ) - .run(); - let spec_path = p.root().join("custom-target.json"); - let spec = fs::read_to_string(&spec_path).unwrap(); - // Some arbitrary change that I hope is safe. - let spec = spec.replace('{', "{\n\"vendor\": \"unknown\",\n"); - fs::write(&spec_path, spec).unwrap(); - p.cargo("build --lib --target custom-target.json -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 [..] -[RUNNING] `rustc [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn changing_spec_relearns_crate_types() { - // Changing the .json file will invalidate the cache of crate types. 
- if !is_nightly() { - // Requires features no_core, lang_items - return; - } - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [lib] - crate-type = ["cdylib"] - "#, - ) - .file("src/lib.rs", MINIMAL_LIB) - .file("custom-target.json", SIMPLE_SPEC) - .build(); - - p.cargo("build --lib --target custom-target.json -v") - .with_status(101) - .with_stderr("error: cannot produce cdylib for `foo [..]") - .run(); - - // Enable dynamic linking. - let spec_path = p.root().join("custom-target.json"); - let spec = fs::read_to_string(&spec_path).unwrap(); - let spec = spec.replace('{', "{\n\"dynamic-linking\": true,\n"); - fs::write(&spec_path, spec).unwrap(); - - p.cargo("build --lib --target custom-target.json -v") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc [..] -[FINISHED] [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/death.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/death.rs deleted file mode 100644 index d140534d5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/death.rs +++ /dev/null @@ -1,101 +0,0 @@ -//! Tests for ctrl-C handling. - -use std::fs; -use std::io::{self, Read}; -use std::net::TcpListener; -use std::process::{Child, Stdio}; -use std::thread; - -use cargo_test_support::{project, slow_cpu_multiplier}; - -#[cargo_test] -fn ctrl_c_kills_everyone() { - let listener = TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = listener.local_addr().unwrap(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - &format!( - r#" - use std::net::TcpStream; - use std::io::Read; - - fn main() {{ - let mut socket = TcpStream::connect("{}").unwrap(); - let _ = socket.read(&mut [0; 10]); - panic!("that read should never return"); - }} - "#, - addr - ), - ) - .build(); - - let mut cargo = p.cargo("build").build_command(); - cargo - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1"); - let mut child = cargo.spawn().unwrap(); - - let mut sock = listener.accept().unwrap().0; - ctrl_c(&mut child); - - assert!(!child.wait().unwrap().success()); - match sock.read(&mut [0; 10]) { - Ok(n) => assert_eq!(n, 0), - Err(e) => assert_eq!(e.kind(), io::ErrorKind::ConnectionReset), - } - - // Ok so what we just did was spawn cargo that spawned a build script, then - // we killed cargo in hopes of it killing the build script as well. If all - // went well the build script is now dead. On Windows, however, this is - // enforced with job objects which means that it may actually be in the - // *process* of being torn down at this point. - // - // Now on Windows we can't completely remove a file until all handles to it - // have been closed. Including those that represent running processes. So if - // we were to return here then there may still be an open reference to some - // file in the build directory. What we want to actually do is wait for the - // build script to *complete* exit. Take care of that by blowing away the - // build directory here, and panicking if we eventually spin too long - // without being able to. 
- for i in 0..10 { - match fs::remove_dir_all(&p.root().join("target")) { - Ok(()) => return, - Err(e) => println!("attempt {}: {}", i, e), - } - thread::sleep(slow_cpu_multiplier(100)); - } - - panic!( - "couldn't remove build directory after a few tries, seems like \ - we won't be able to!" - ); -} - -#[cfg(unix)] -pub fn ctrl_c(child: &mut Child) { - let r = unsafe { libc::kill(-(child.id() as i32), libc::SIGINT) }; - if r < 0 { - panic!("failed to kill: {}", io::Error::last_os_error()); - } -} - -#[cfg(windows)] -pub fn ctrl_c(child: &mut Child) { - child.kill().unwrap(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/dep_info.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/dep_info.rs deleted file mode 100644 index ae385b137..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/dep_info.rs +++ /dev/null @@ -1,615 +0,0 @@ -//! Tests for dep-info files. This includes the dep-info file Cargo creates in -//! the output directory, and the ones stored in the fingerprint. - -use cargo_test_support::compare::assert_match_exact; -use cargo_test_support::paths::{self, CargoPathExt}; -use cargo_test_support::registry::Package; -use cargo_test_support::{ - basic_bin_manifest, basic_manifest, is_nightly, main_file, project, rustc_host, Project, -}; -use filetime::FileTime; -use std::convert::TryInto; -use std::fs; -use std::path::Path; -use std::str; - -// Helper for testing dep-info files in the fingerprint dir. -#[track_caller] -fn assert_deps(project: &Project, fingerprint: &str, test_cb: impl Fn(&Path, &[(u8, &str)])) { - let mut files = project - .glob(fingerprint) - .map(|f| f.expect("unwrap glob result")) - // Filter out `.json` entries. - .filter(|f| f.extension().is_none()); - let info_path = files - .next() - .unwrap_or_else(|| panic!("expected 1 dep-info file at {}, found 0", fingerprint)); - assert!(files.next().is_none(), "expected only 1 dep-info file"); - let dep_info = fs::read(&info_path).unwrap(); - let dep_info = &mut &dep_info[..]; - let deps = (0..read_usize(dep_info)) - .map(|_| { - ( - read_u8(dep_info), - str::from_utf8(read_bytes(dep_info)).unwrap(), - ) - }) - .collect::>(); - test_cb(&info_path, &deps); - - fn read_usize(bytes: &mut &[u8]) -> usize { - let ret = &bytes[..4]; - *bytes = &bytes[4..]; - - u32::from_le_bytes(ret.try_into().unwrap()) as usize - } - - fn read_u8(bytes: &mut &[u8]) -> u8 { - let ret = bytes[0]; - *bytes = &bytes[1..]; - ret - } - - fn read_bytes<'a>(bytes: &mut &'a [u8]) -> &'a [u8] { - let n = read_usize(bytes); - let ret = &bytes[..n]; - *bytes = &bytes[n..]; - ret - } -} - -fn assert_deps_contains(project: &Project, fingerprint: &str, expected: &[(u8, &str)]) { - assert_deps(project, fingerprint, |info_path, entries| { - for (e_kind, e_path) in expected { - let pattern = glob::Pattern::new(e_path).unwrap(); - let count = entries - .iter() - .filter(|(kind, path)| kind == e_kind && pattern.matches(path)) - .count(); - if count != 1 { - panic!( - "Expected 1 match of {} {} in {:?}, got {}:\n{:#?}", - e_kind, e_path, info_path, count, entries - ); - } - } - }) -} - -#[cargo_test] -fn build_dep_info() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("build").run(); - - let depinfo_bin_path = &p.bin("foo").with_extension("d"); - - assert!(depinfo_bin_path.is_file()); - - let depinfo = p.read_file(depinfo_bin_path.to_str().unwrap()); - - let bin_path = p.bin("foo"); - let src_path = 
p.root().join("src").join("foo.rs"); - if !depinfo.lines().any(|line| { - line.starts_with(&format!("{}:", bin_path.display())) - && line.contains(src_path.to_str().unwrap()) - }) { - panic!( - "Could not find {:?}: {:?} in {:?}", - bin_path, src_path, depinfo_bin_path - ); - } -} - -#[cargo_test] -fn build_dep_info_lib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex" - crate-type = ["lib"] - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .file("examples/ex.rs", "") - .build(); - - p.cargo("build --example=ex").run(); - assert!(p.example_lib("ex", "lib").with_extension("d").is_file()); -} - -#[cargo_test] -fn build_dep_info_rlib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex" - crate-type = ["rlib"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/ex.rs", "") - .build(); - - p.cargo("build --example=ex").run(); - assert!(p.example_lib("ex", "rlib").with_extension("d").is_file()); -} - -#[cargo_test] -fn build_dep_info_dylib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex" - crate-type = ["dylib"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/ex.rs", "") - .build(); - - p.cargo("build --example=ex").run(); - assert!(p.example_lib("ex", "dylib").with_extension("d").is_file()); -} - -#[cargo_test] -fn dep_path_inside_target_has_correct_path() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("a")) - .file("target/debug/blah", "") - .file( - "src/main.rs", - r#" - fn main() { - let x = include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/target/debug/blah")); - } - "#, - ) - .build(); - - p.cargo("build").run(); - - let depinfo_path = &p.bin("a").with_extension("d"); - - assert!(depinfo_path.is_file(), "{:?}", depinfo_path); - - let depinfo = p.read_file(depinfo_path.to_str().unwrap()); - - let bin_path = p.bin("a"); - let target_debug_blah = Path::new("target").join("debug").join("blah"); - if !depinfo.lines().any(|line| { - line.starts_with(&format!("{}:", bin_path.display())) - && line.contains(target_debug_blah.to_str().unwrap()) - }) { - panic!( - "Could not find {:?}: {:?} in {:?}", - bin_path, target_debug_blah, depinfo_path - ); - } -} - -#[cargo_test] -fn no_rewrite_if_no_change() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("build").run(); - let dep_info = p.root().join("target/debug/libfoo.d"); - let metadata1 = dep_info.metadata().unwrap(); - p.cargo("build").run(); - let metadata2 = dep_info.metadata().unwrap(); - - assert_eq!( - FileTime::from_last_modification_time(&metadata1), - FileTime::from_last_modification_time(&metadata2), - ); -} - -#[cargo_test] -fn relative_depinfo_paths_ws() { - if !is_nightly() { - // -Z binary-dep-depinfo is unstable (https://github.com/rust-lang/rust/issues/63012) - return; - } - - // Test relative dep-info paths in a workspace with --target with - // proc-macros and other dependency kinds. 
- Package::new("regdep", "0.1.0") - .file("src/lib.rs", "pub fn f() {}") - .publish(); - Package::new("pmdep", "0.1.0") - .file("src/lib.rs", "pub fn f() {}") - .publish(); - Package::new("bdep", "0.1.0") - .file("src/lib.rs", "pub fn f() {}") - .publish(); - - let p = project() - /*********** Workspace ***********/ - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo"] - "#, - ) - /*********** Main Project ***********/ - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - pm = {path = "../pm"} - bar = {path = "../bar"} - regdep = "0.1" - - [build-dependencies] - bdep = "0.1" - bar = {path = "../bar"} - "#, - ) - .file( - "foo/src/main.rs", - r#" - pm::noop!{} - - fn main() { - bar::f(); - regdep::f(); - } - "#, - ) - .file("foo/build.rs", "fn main() { bdep::f(); }") - /*********** Proc Macro ***********/ - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - edition = "2018" - - [lib] - proc-macro = true - - [dependencies] - pmdep = "0.1" - "#, - ) - .file( - "pm/src/lib.rs", - r#" - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro] - pub fn noop(_item: TokenStream) -> TokenStream { - pmdep::f(); - "".parse().unwrap() - } - "#, - ) - /*********** Path Dependency `bar` ***********/ - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn f() {}") - .build(); - - let host = rustc_host(); - p.cargo("build -Z binary-dep-depinfo --target") - .arg(&host) - .masquerade_as_nightly_cargo() - .with_stderr_contains("[COMPILING] foo [..]") - .run(); - - assert_deps_contains( - &p, - "target/debug/.fingerprint/pm-*/dep-lib-pm", - &[(0, "src/lib.rs"), (1, "debug/deps/libpmdep-*.rlib")], - ); - - assert_deps_contains( - &p, - &format!("target/{}/debug/.fingerprint/foo-*/dep-bin-foo", host), - &[ - (0, "src/main.rs"), - ( - 1, - &format!( - "debug/deps/{}pm-*.{}", - paths::get_lib_prefix("proc-macro"), - paths::get_lib_extension("proc-macro") - ), - ), - (1, &format!("{}/debug/deps/libbar-*.rlib", host)), - (1, &format!("{}/debug/deps/libregdep-*.rlib", host)), - ], - ); - - assert_deps_contains( - &p, - "target/debug/.fingerprint/foo-*/dep-build-script-build-script-build", - &[(0, "build.rs"), (1, "debug/deps/libbdep-*.rlib")], - ); - - // Make sure it stays fresh. - p.cargo("build -Z binary-dep-depinfo --target") - .arg(&host) - .masquerade_as_nightly_cargo() - .with_stderr("[FINISHED] dev [..]") - .run(); -} - -#[cargo_test] -fn relative_depinfo_paths_no_ws() { - if !is_nightly() { - // -Z binary-dep-depinfo is unstable (https://github.com/rust-lang/rust/issues/63012) - return; - } - - // Test relative dep-info paths without a workspace with proc-macros and - // other dependency kinds. 
- Package::new("regdep", "0.1.0") - .file("src/lib.rs", "pub fn f() {}") - .publish(); - Package::new("pmdep", "0.1.0") - .file("src/lib.rs", "pub fn f() {}") - .publish(); - Package::new("bdep", "0.1.0") - .file("src/lib.rs", "pub fn f() {}") - .publish(); - - let p = project() - /*********** Main Project ***********/ - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - pm = {path = "pm"} - bar = {path = "bar"} - regdep = "0.1" - - [build-dependencies] - bdep = "0.1" - bar = {path = "bar"} - "#, - ) - .file( - "src/main.rs", - r#" - pm::noop!{} - - fn main() { - bar::f(); - regdep::f(); - } - "#, - ) - .file("build.rs", "fn main() { bdep::f(); }") - /*********** Proc Macro ***********/ - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - edition = "2018" - - [lib] - proc-macro = true - - [dependencies] - pmdep = "0.1" - "#, - ) - .file( - "pm/src/lib.rs", - r#" - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro] - pub fn noop(_item: TokenStream) -> TokenStream { - pmdep::f(); - "".parse().unwrap() - } - "#, - ) - /*********** Path Dependency `bar` ***********/ - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn f() {}") - .build(); - - p.cargo("build -Z binary-dep-depinfo") - .masquerade_as_nightly_cargo() - .with_stderr_contains("[COMPILING] foo [..]") - .run(); - - assert_deps_contains( - &p, - "target/debug/.fingerprint/pm-*/dep-lib-pm", - &[(0, "src/lib.rs"), (1, "debug/deps/libpmdep-*.rlib")], - ); - - assert_deps_contains( - &p, - "target/debug/.fingerprint/foo-*/dep-bin-foo", - &[ - (0, "src/main.rs"), - ( - 1, - &format!( - "debug/deps/{}pm-*.{}", - paths::get_lib_prefix("proc-macro"), - paths::get_lib_extension("proc-macro") - ), - ), - (1, "debug/deps/libbar-*.rlib"), - (1, "debug/deps/libregdep-*.rlib"), - ], - ); - - assert_deps_contains( - &p, - "target/debug/.fingerprint/foo-*/dep-build-script-build-script-build", - &[(0, "build.rs"), (1, "debug/deps/libbdep-*.rlib")], - ); - - // Make sure it stays fresh. - p.cargo("build -Z binary-dep-depinfo") - .masquerade_as_nightly_cargo() - .with_stderr("[FINISHED] dev [..]") - .run(); -} - -#[cargo_test] -fn reg_dep_source_not_tracked() { - // Make sure source files in dep-info file are not tracked for registry dependencies. 
- Package::new("regdep", "0.1.0") - .file("src/lib.rs", "pub fn f() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - regdep = "0.1" - "#, - ) - .file("src/lib.rs", "pub fn f() { regdep::f(); }") - .build(); - - p.cargo("build").run(); - - assert_deps( - &p, - "target/debug/.fingerprint/regdep-*/dep-lib-regdep", - |info_path, entries| { - for (kind, path) in entries { - if *kind == 1 { - panic!( - "Did not expect package root relative path type: {:?} in {:?}", - path, info_path - ); - } - } - }, - ); -} - -#[cargo_test] -fn canonical_path() { - if !is_nightly() { - // -Z binary-dep-depinfo is unstable (https://github.com/rust-lang/rust/issues/63012) - return; - } - if !cargo_test_support::symlink_supported() { - return; - } - Package::new("regdep", "0.1.0") - .file("src/lib.rs", "pub fn f() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - regdep = "0.1" - "#, - ) - .file("src/lib.rs", "pub fn f() { regdep::f(); }") - .build(); - - let real = p.root().join("real_target"); - real.mkdir_p(); - p.symlink(real, "target"); - - p.cargo("build -Z binary-dep-depinfo") - .masquerade_as_nightly_cargo() - .run(); - - assert_deps_contains( - &p, - "target/debug/.fingerprint/foo-*/dep-lib-foo", - &[(0, "src/lib.rs"), (1, "debug/deps/libregdep-*.rmeta")], - ); -} - -#[cargo_test] -fn non_local_build_script() { - // Non-local build script information is not included. - Package::new("bar", "1.0.0") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rerun-if-changed=build.rs"); - } - "#, - ) - .file("src/lib.rs", "") - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); - let contents = p.read_file("target/debug/foo.d"); - assert_match_exact( - "[ROOT]/foo/target/debug/foo[EXE]: [ROOT]/foo/src/main.rs", - &contents, - ); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/directory.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/directory.rs deleted file mode 100644 index aef2969e8..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/directory.rs +++ /dev/null @@ -1,774 +0,0 @@ -//! Tests for directory sources. 
- -use std::collections::HashMap; -use std::fs; -use std::str; - -use serde::Serialize; - -use cargo_test_support::cargo_process; -use cargo_test_support::git; -use cargo_test_support::paths; -use cargo_test_support::registry::{cksum, Package}; -use cargo_test_support::{basic_manifest, project, t, ProjectBuilder}; - -fn setup() { - let root = paths::root(); - t!(fs::create_dir(&root.join(".cargo"))); - t!(fs::write( - root.join(".cargo/config"), - r#" - [source.crates-io] - replace-with = 'my-awesome-local-registry' - - [source.my-awesome-local-registry] - directory = 'index' - "# - )); -} - -struct VendorPackage { - p: Option, - cksum: Checksum, -} - -#[derive(Serialize)] -struct Checksum { - package: Option, - files: HashMap, -} - -impl VendorPackage { - fn new(name: &str) -> VendorPackage { - VendorPackage { - p: Some(project().at(&format!("index/{}", name))), - cksum: Checksum { - package: Some(String::new()), - files: HashMap::new(), - }, - } - } - - fn file(&mut self, name: &str, contents: &str) -> &mut VendorPackage { - self.p = Some(self.p.take().unwrap().file(name, contents)); - self.cksum - .files - .insert(name.to_string(), cksum(contents.as_bytes())); - self - } - - fn disable_checksum(&mut self) -> &mut VendorPackage { - self.cksum.package = None; - self - } - - fn no_manifest(mut self) -> Self { - self.p = self.p.map(|pb| pb.no_manifest()); - self - } - - fn build(&mut self) { - let p = self.p.take().unwrap(); - let json = serde_json::to_string(&self.cksum).unwrap(); - let p = p.file(".cargo-checksum.json", &json); - let _ = p.build(); - } -} - -#[cargo_test] -fn simple() { - setup(); - - VendorPackage::new("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.1.0 ([CWD]) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn simple_install() { - setup(); - - VendorPackage::new("foo") - .file("src/lib.rs", "pub fn foo() {}") - .build(); - - VendorPackage::new("bar") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - foo = "0.0.1" - "#, - ) - .file( - "src/main.rs", - "extern crate foo; pub fn main() { foo::foo(); }", - ) - .build(); - - cargo_process("install bar") - .with_stderr( - "\ -[INSTALLING] bar v0.1.0 -[COMPILING] foo v0.0.1 -[COMPILING] bar v0.1.0 -[FINISHED] release [optimized] target(s) in [..]s -[INSTALLING] [..]bar[..] 
-[INSTALLED] package `bar v0.1.0` (executable `bar[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .run(); -} - -#[cargo_test] -fn simple_install_fail() { - setup(); - - VendorPackage::new("foo") - .file("src/lib.rs", "pub fn foo() {}") - .build(); - - VendorPackage::new("bar") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - foo = "0.1.0" - baz = "9.8.7" - "#, - ) - .file( - "src/main.rs", - "extern crate foo; pub fn main() { foo::foo(); }", - ) - .build(); - - cargo_process("install bar") - .with_status(101) - .with_stderr( - " Installing bar v0.1.0 -error: failed to compile `bar v0.1.0`, intermediate artifacts can be found at `[..]` - -Caused by: - no matching package found - searched package name: `baz` - perhaps you meant: bar or foo - location searched: registry `crates-io` - required by package `bar v0.1.0` -", - ) - .run(); -} - -#[cargo_test] -fn install_without_feature_dep() { - setup(); - - VendorPackage::new("foo") - .file("src/lib.rs", "pub fn foo() {}") - .build(); - - VendorPackage::new("bar") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - foo = "0.0.1" - baz = { version = "9.8.7", optional = true } - - [features] - wantbaz = ["baz"] - "#, - ) - .file( - "src/main.rs", - "extern crate foo; pub fn main() { foo::foo(); }", - ) - .build(); - - cargo_process("install bar") - .with_stderr( - "\ -[INSTALLING] bar v0.1.0 -[COMPILING] foo v0.0.1 -[COMPILING] bar v0.1.0 -[FINISHED] release [optimized] target(s) in [..]s -[INSTALLING] [..]bar[..] -[INSTALLED] package `bar v0.1.0` (executable `bar[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .run(); -} - -#[cargo_test] -fn not_there() { - setup(); - - let _ = project().at("index").build(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: no matching package named `bar` found -location searched: [..] -required by package `foo v0.1.0 ([..])` -", - ) - .run(); -} - -#[cargo_test] -fn multiple() { - setup(); - - VendorPackage::new("bar-0.1.0") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .file(".cargo-checksum", "") - .build(); - - VendorPackage::new("bar-0.2.0") - .file("Cargo.toml", &basic_manifest("bar", "0.2.0")) - .file("src/lib.rs", "pub fn bar() {}") - .file(".cargo-checksum", "") - .build(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.1.0 ([CWD]) -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn crates_io_then_directory() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - let cksum = Package::new("bar", "0.1.0") - .file("src/lib.rs", "pub fn bar() -> u32 { 0 }") - .publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 ([..]) -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.1.0 ([CWD]) -[FINISHED] [..] -", - ) - .run(); - - setup(); - - let mut v = VendorPackage::new("bar"); - v.file("Cargo.toml", &basic_manifest("bar", "0.1.0")); - v.file("src/lib.rs", "pub fn bar() -> u32 { 1 }"); - v.cksum.package = Some(cksum); - v.build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.1.0 ([CWD]) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn crates_io_then_bad_checksum() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - Package::new("bar", "0.1.0").publish(); - - p.cargo("build").run(); - setup(); - - VendorPackage::new("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: checksum for `bar v0.1.0` changed between lock files - -this could be indicative of a few possible errors: - - * the lock file is corrupt - * a replacement source in use (e.g., a mirror) returned a different checksum - * the source itself may be corrupt in one way or another - -unable to verify that `bar v0.1.0` is the same as when the lockfile was generated - -", - ) - .run(); -} - -#[cargo_test] -fn bad_file_checksum() { - setup(); - - VendorPackage::new("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "") - .build(); - - t!(fs::write( - paths::root().join("index/bar/src/lib.rs"), - "fn bar() -> u32 { 0 }" - )); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: the listed checksum of `[..]lib.rs` has changed: -expected: [..] -actual: [..] 
- -directory sources are not intended to be edited, if modifications are \ -required then it is recommended that `[patch]` is used with a forked copy of \ -the source -", - ) - .run(); -} - -#[cargo_test] -fn only_dot_files_ok() { - setup(); - - VendorPackage::new("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "") - .build(); - VendorPackage::new("foo") - .no_manifest() - .file(".bar", "") - .build(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn random_files_ok() { - setup(); - - VendorPackage::new("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "") - .build(); - VendorPackage::new("foo") - .no_manifest() - .file("bar", "") - .file("../test", "") - .build(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn git_lock_file_doesnt_change() { - let git = git::new("git", |p| { - p.file("Cargo.toml", &basic_manifest("git", "0.5.0")) - .file("src/lib.rs", "") - }); - - VendorPackage::new("git") - .file("Cargo.toml", &basic_manifest("git", "0.5.0")) - .file("src/lib.rs", "") - .disable_checksum() - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - git = {{ git = '{0}' }} - "#, - git.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - let lock1 = p.read_lockfile(); - - let root = paths::root(); - t!(fs::create_dir(&root.join(".cargo"))); - t!(fs::write( - root.join(".cargo/config"), - format!( - r#" - [source.my-git-repo] - git = '{}' - replace-with = 'my-awesome-local-registry' - - [source.my-awesome-local-registry] - directory = 'index' - "#, - git.url() - ) - )); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] [..] -[COMPILING] [..] -[FINISHED] [..] -", - ) - .run(); - - let lock2 = p.read_lockfile(); - assert_eq!(lock1, lock2, "lock files changed"); -} - -#[cargo_test] -fn git_override_requires_lockfile() { - VendorPackage::new("git") - .file("Cargo.toml", &basic_manifest("git", "0.5.0")) - .file("src/lib.rs", "") - .disable_checksum() - .build(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - git = { git = 'https://example.com/' } - "#, - ) - .file("src/lib.rs", "") - .build(); - - let root = paths::root(); - t!(fs::create_dir(&root.join(".cargo"))); - t!(fs::write( - root.join(".cargo/config"), - r#" - [source.my-git-repo] - git = 'https://example.com/' - replace-with = 'my-awesome-local-registry' - - [source.my-awesome-local-registry] - directory = 'index' - "# - )); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to get `git` as a dependency of package `foo v0.0.1 ([..])` - -Caused by: - failed to load source for dependency `git` - -Caused by: - Unable to update [..] 
- -Caused by: - the source my-git-repo requires a lock file to be present first before it can be - used against vendored source code - - remove the source replacement configuration, generate a lock file, and then - restore the source replacement configuration to continue the build -", - ) - .run(); -} - -#[cargo_test] -fn workspace_different_locations() { - let p = project() - .no_manifest() - .file( - "foo/Cargo.toml", - r#" - [package] - name = 'foo' - version = '0.1.0' - - [dependencies] - baz = "*" - "#, - ) - .file("foo/src/lib.rs", "") - .file("foo/vendor/baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("foo/vendor/baz/src/lib.rs", "") - .file("foo/vendor/baz/.cargo-checksum.json", "{\"files\":{}}") - .file( - "bar/Cargo.toml", - r#" - [package] - name = 'bar' - version = '0.1.0' - - [dependencies] - baz = "*" - "#, - ) - .file("bar/src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - target-dir = './target' - - [source.crates-io] - replace-with = 'my-awesome-local-registry' - - [source.my-awesome-local-registry] - directory = 'foo/vendor' - "#, - ) - .build(); - - p.cargo("build").cwd("foo").run(); - p.cargo("build") - .cwd("bar") - .with_stderr( - "\ -[COMPILING] bar [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn version_missing() { - setup(); - - VendorPackage::new("foo") - .file("src/lib.rs", "pub fn foo() {}") - .build(); - - VendorPackage::new("bar") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - foo = "2" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - cargo_process("install bar") - .with_stderr( - "\ -[INSTALLING] bar v0.1.0 -error: failed to compile [..] - -Caused by: - failed to select a version for the requirement `foo = \"^2\"` - candidate versions found which didn't match: 0.0.1 - location searched: directory source `[..] (which is replacing registry `[..]`) - required by package `bar v0.1.0` - perhaps a crate was updated and forgotten to be re-vendored? -", - ) - .with_status(101) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/doc.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/doc.rs deleted file mode 100644 index da500d4a1..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/doc.rs +++ /dev/null @@ -1,2635 +0,0 @@ -//! Tests for the `cargo doc` command. - -use cargo::core::compiler::RustDocFingerprint; -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_lib_manifest, basic_manifest, git, project}; -use cargo_test_support::{is_nightly, rustc_host, symlink_supported, tools}; -use std::fs; -use std::str; - -#[cargo_test] -fn simple() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "pub fn foo() {}") - .build(); - - p.cargo("doc") - .with_stderr( - "\ -[..] foo v0.0.1 ([CWD]) -[..] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - assert!(p.root().join("target/doc").is_dir()); - assert!(p.root().join("target/doc/foo/index.html").is_file()); -} - -#[cargo_test] -fn doc_no_libs() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name = "foo" - doc = false - "#, - ) - .file("src/main.rs", "bad code") - .build(); - - p.cargo("doc").run(); -} - -#[cargo_test] -fn doc_twice() { - let p = project().file("src/lib.rs", "pub fn foo() {}").build(); - - p.cargo("doc") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("doc").with_stdout("").run(); -} - -#[cargo_test] -fn doc_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/lib.rs", "extern crate bar; pub fn foo() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("doc") - .with_stderr( - "\ -[..] bar v0.0.1 ([CWD]/bar) -[..] bar v0.0.1 ([CWD]/bar) -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - assert!(p.root().join("target/doc").is_dir()); - assert!(p.root().join("target/doc/foo/index.html").is_file()); - assert!(p.root().join("target/doc/bar/index.html").is_file()); - - // Verify that it only emits rmeta for the dependency. - assert_eq!(p.glob("target/debug/**/*.rlib").count(), 0); - assert_eq!(p.glob("target/debug/deps/libbar-*.rmeta").count(), 1); - - p.cargo("doc") - .env("CARGO_LOG", "cargo::ops::cargo_rustc::fingerprint") - .with_stdout("") - .run(); - - assert!(p.root().join("target/doc").is_dir()); - assert!(p.root().join("target/doc/foo/index.html").is_file()); - assert!(p.root().join("target/doc/bar/index.html").is_file()); -} - -#[cargo_test] -fn doc_no_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/lib.rs", "extern crate bar; pub fn foo() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("doc --no-deps") - .with_stderr( - "\ -[CHECKING] bar v0.0.1 ([CWD]/bar) -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - - assert!(p.root().join("target/doc").is_dir()); - assert!(p.root().join("target/doc/foo/index.html").is_file()); - assert!(!p.root().join("target/doc/bar/index.html").is_file()); -} - -#[cargo_test] -fn doc_only_bin() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "extern crate bar; pub fn foo() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("doc -v").run(); - - assert!(p.root().join("target/doc").is_dir()); - assert!(p.root().join("target/doc/bar/index.html").is_file()); - assert!(p.root().join("target/doc/foo/index.html").is_file()); -} - -#[cargo_test] -fn doc_multiple_targets_same_name_lib() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "bar"] - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [lib] - name = "foo_lib" - "#, - ) - .file("foo/src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - [lib] - name = "foo_lib" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("doc --workspace") - .with_status(101) - .with_stderr( - "\ -error: document output filename collision -The lib `foo_lib` in package `foo v0.1.0 ([ROOT]/foo/foo)` has the same name as \ -the lib `foo_lib` in package `bar v0.1.0 ([ROOT]/foo/bar)`. -Only one may be documented at once since they output to the same path. -Consider documenting only one, renaming one, or marking one with `doc = false` in Cargo.toml. -", - ) - .run(); -} - -#[cargo_test] -fn doc_multiple_targets_same_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "bar"] - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [[bin]] - name = "foo_lib" - path = "src/foo_lib.rs" - "#, - ) - .file("foo/src/foo_lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - [lib] - name = "foo_lib" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("doc --workspace") - .with_stderr_unordered( - "\ -warning: output filename collision. -The bin target `foo_lib` in package `foo v0.1.0 ([ROOT]/foo/foo)` \ -has the same output filename as the lib target `foo_lib` in package \ -`bar v0.1.0 ([ROOT]/foo/bar)`. -Colliding filename is: [ROOT]/foo/target/doc/foo_lib/index.html -The targets should have unique names. -This is a known bug where multiple crates with the same name use -the same path; see . -[DOCUMENTING] bar v0.1.0 ([ROOT]/foo/bar) -[DOCUMENTING] foo v0.1.0 ([ROOT]/foo/foo) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn doc_multiple_targets_same_name_bin() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "bar"] - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - "#, - ) - .file("foo/src/bin/foo-cli.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - "#, - ) - .file("bar/src/bin/foo-cli.rs", "") - .build(); - - p.cargo("doc --workspace") - .with_status(101) - .with_stderr( - "\ -error: document output filename collision -The bin `foo-cli` in package `foo v0.1.0 ([ROOT]/foo/foo)` has the same name as \ -the bin `foo-cli` in package `bar v0.1.0 ([ROOT]/foo/bar)`. 
-Only one may be documented at once since they output to the same path. -Consider documenting only one, renaming one, or marking one with `doc = false` in Cargo.toml. -", - ) - .run(); -} - -#[cargo_test] -fn doc_multiple_targets_same_name_undoced() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "bar"] - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [[bin]] - name = "foo-cli" - "#, - ) - .file("foo/src/foo-cli.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - [[bin]] - name = "foo-cli" - doc = false - "#, - ) - .file("bar/src/foo-cli.rs", "") - .build(); - - p.cargo("doc --workspace").run(); -} - -#[cargo_test] -fn doc_lib_bin_same_name_documents_lib() { - let p = project() - .file( - "src/main.rs", - r#" - //! Binary documentation - extern crate foo; - fn main() { - foo::foo(); - } - "#, - ) - .file( - "src/lib.rs", - r#" - //! Library documentation - pub fn foo() {} - "#, - ) - .build(); - - p.cargo("doc") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - let doc_html = p.read_file("target/doc/foo/index.html"); - assert!(doc_html.contains("Library")); - assert!(!doc_html.contains("Binary")); -} - -#[cargo_test] -fn doc_lib_bin_same_name_documents_lib_when_requested() { - let p = project() - .file( - "src/main.rs", - r#" - //! Binary documentation - extern crate foo; - fn main() { - foo::foo(); - } - "#, - ) - .file( - "src/lib.rs", - r#" - //! Library documentation - pub fn foo() {} - "#, - ) - .build(); - - p.cargo("doc --lib") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - let doc_html = p.read_file("target/doc/foo/index.html"); - assert!(doc_html.contains("Library")); - assert!(!doc_html.contains("Binary")); -} - -#[cargo_test] -fn doc_lib_bin_same_name_documents_named_bin_when_requested() { - let p = project() - .file( - "src/main.rs", - r#" - //! Binary documentation - extern crate foo; - fn main() { - foo::foo(); - } - "#, - ) - .file( - "src/lib.rs", - r#" - //! Library documentation - pub fn foo() {} - "#, - ) - .build(); - - p.cargo("doc --bin foo") - // The checking/documenting lines are sometimes swapped since they run - // concurrently. - .with_stderr_unordered( - "\ -warning: output filename collision. -The bin target `foo` in package `foo v0.0.1 ([ROOT]/foo)` \ -has the same output filename as the lib target `foo` in package `foo v0.0.1 ([ROOT]/foo)`. -Colliding filename is: [ROOT]/foo/target/doc/foo/index.html -The targets should have unique names. -This is a known bug where multiple crates with the same name use -the same path; see . -[CHECKING] foo v0.0.1 ([CWD]) -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - let doc_html = p.read_file("target/doc/foo/index.html"); - assert!(!doc_html.contains("Library")); - assert!(doc_html.contains("Binary")); -} - -#[cargo_test] -fn doc_lib_bin_same_name_documents_bins_when_requested() { - let p = project() - .file( - "src/main.rs", - r#" - //! Binary documentation - extern crate foo; - fn main() { - foo::foo(); - } - "#, - ) - .file( - "src/lib.rs", - r#" - //! Library documentation - pub fn foo() {} - "#, - ) - .build(); - - p.cargo("doc --bins") - // The checking/documenting lines are sometimes swapped since they run - // concurrently. 
- .with_stderr_unordered( - "\ -warning: output filename collision. -The bin target `foo` in package `foo v0.0.1 ([ROOT]/foo)` \ -has the same output filename as the lib target `foo` in package `foo v0.0.1 ([ROOT]/foo)`. -Colliding filename is: [ROOT]/foo/target/doc/foo/index.html -The targets should have unique names. -This is a known bug where multiple crates with the same name use -the same path; see . -[CHECKING] foo v0.0.1 ([CWD]) -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - let doc_html = p.read_file("target/doc/foo/index.html"); - assert!(!doc_html.contains("Library")); - assert!(doc_html.contains("Binary")); -} - -#[cargo_test] -fn doc_lib_bin_example_same_name_documents_named_example_when_requested() { - let p = project() - .file( - "src/main.rs", - r#" - //! Binary documentation - extern crate foo; - fn main() { - foo::foo(); - } - "#, - ) - .file( - "src/lib.rs", - r#" - //! Library documentation - pub fn foo() {} - "#, - ) - .file( - "examples/ex1.rs", - r#" - //! Example1 documentation - pub fn x() { f(); } - "#, - ) - .build(); - - p.cargo("doc --example ex1") - // The checking/documenting lines are sometimes swapped since they run - // concurrently. - .with_stderr_unordered( - "\ -[CHECKING] foo v0.0.1 ([CWD]) -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); - - let doc_html = p.read_file("target/doc/ex1/index.html"); - assert!(!doc_html.contains("Library")); - assert!(!doc_html.contains("Binary")); - assert!(doc_html.contains("Example1")); -} - -#[cargo_test] -fn doc_lib_bin_example_same_name_documents_examples_when_requested() { - let p = project() - .file( - "src/main.rs", - r#" - //! Binary documentation - extern crate foo; - fn main() { - foo::foo(); - } - "#, - ) - .file( - "src/lib.rs", - r#" - //! Library documentation - pub fn foo() {} - "#, - ) - .file( - "examples/ex1.rs", - r#" - //! Example1 documentation - pub fn example1() { f(); } - "#, - ) - .file( - "examples/ex2.rs", - r#" - //! Example2 documentation - pub fn example2() { f(); } - "#, - ) - .build(); - - p.cargo("doc --examples") - // The checking/documenting lines are sometimes swapped since they run - // concurrently. - .with_stderr_unordered( - "\ -[CHECKING] foo v0.0.1 ([CWD]) -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); - - let example_doc_html_1 = p.read_file("target/doc/ex1/index.html"); - let example_doc_html_2 = p.read_file("target/doc/ex2/index.html"); - - assert!(!example_doc_html_1.contains("Library")); - assert!(!example_doc_html_1.contains("Binary")); - - assert!(!example_doc_html_2.contains("Library")); - assert!(!example_doc_html_2.contains("Binary")); - - assert!(example_doc_html_1.contains("Example1")); - assert!(example_doc_html_2.contains("Example2")); -} - -#[cargo_test] -fn doc_dash_p() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "extern crate a;") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies.b] - path = "../b" - "#, - ) - .file("a/src/lib.rs", "extern crate b;") - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("doc -p a") - .with_stderr( - "\ -[..] b v0.0.1 ([CWD]/b) -[..] 
b v0.0.1 ([CWD]/b) -[DOCUMENTING] a v0.0.1 ([CWD]/a) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn doc_all_exclude() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") - .build(); - - p.cargo("doc --workspace --exclude baz") - .with_stderr_does_not_contain("[DOCUMENTING] baz v0.1.0 [..]") - .with_stderr( - "\ -[DOCUMENTING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn doc_all_exclude_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") - .build(); - - p.cargo("doc --workspace --exclude '*z'") - .with_stderr_does_not_contain("[DOCUMENTING] baz v0.1.0 [..]") - .with_stderr( - "\ -[DOCUMENTING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn doc_same_name() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/main.rs", "fn main() {}") - .file("examples/main.rs", "fn main() {}") - .file("tests/main.rs", "fn main() {}") - .build(); - - p.cargo("doc").run(); -} - -#[cargo_test] -fn doc_target() { - if !is_nightly() { - // no_core, lang_items requires nightly. - return; - } - const TARGET: &str = "arm-unknown-linux-gnueabihf"; - - let p = project() - .file( - "src/lib.rs", - r#" - #![feature(no_core, lang_items)] - #![no_core] - - #[lang = "sized"] - trait Sized {} - - extern { - pub static A: u32; - } - "#, - ) - .build(); - - p.cargo("doc --verbose --target").arg(TARGET).run(); - assert!(p.root().join(&format!("target/{}/doc", TARGET)).is_dir()); - assert!(p - .root() - .join(&format!("target/{}/doc/foo/index.html", TARGET)) - .is_file()); -} - -#[cargo_test] -fn target_specific_not_documented() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [target.foo.dependencies] - a = { path = "a" } - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", "not rust") - .build(); - - p.cargo("doc").run(); -} - -#[cargo_test] -fn output_not_captured() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a" } - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file( - "a/src/lib.rs", - " - /// ``` - /// ` - /// ``` - pub fn foo() {} - ", - ) - .build(); - - p.cargo("doc") - .with_stderr_contains("[..]unknown start of token: `") - .run(); -} - -#[cargo_test] -fn target_specific_documented() { - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [target.foo.dependencies] - a = {{ path = "a" }} - [target.{}.dependencies] - a = {{ path = "a" }} - "#, - rustc_host() - ), - ) - .file( - "src/lib.rs", - " - extern crate a; - - /// test - pub fn foo() {} - ", - ) - .file("a/Cargo.toml", &basic_manifest("a", 
"0.0.1")) - .file( - "a/src/lib.rs", - " - /// test - pub fn foo() {} - ", - ) - .build(); - - p.cargo("doc").run(); -} - -#[cargo_test] -fn no_document_build_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [build-dependencies] - a = { path = "a" } - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file( - "a/src/lib.rs", - " - /// ``` - /// โ˜ƒ - /// ``` - pub fn foo() {} - ", - ) - .build(); - - p.cargo("doc").run(); -} - -#[cargo_test] -fn doc_release() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("build --release").run(); - p.cargo("doc --release -v") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([..]) -[RUNNING] `rustdoc [..] src/lib.rs [..]` -[FINISHED] release [optimized] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn doc_multiple_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - - [dependencies.baz] - path = "baz" - "#, - ) - .file("src/lib.rs", "extern crate bar; pub fn foo() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("doc -p bar -p baz -v").run(); - - assert!(p.root().join("target/doc").is_dir()); - assert!(p.root().join("target/doc/bar/index.html").is_file()); - assert!(p.root().join("target/doc/baz/index.html").is_file()); -} - -#[cargo_test] -fn features() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - - [features] - foo = ["bar/bar"] - "#, - ) - .file("src/lib.rs", r#"#[cfg(feature = "foo")] pub fn foo() {}"#) - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [features] - bar = [] - "#, - ) - .file( - "bar/build.rs", - r#" - fn main() { - println!("cargo:rustc-cfg=bar"); - } - "#, - ) - .file( - "bar/src/lib.rs", - r#"#[cfg(feature = "bar")] pub fn bar() {}"#, - ) - .build(); - p.cargo("doc --features foo") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 [..] -[DOCUMENTING] bar v0.0.1 [..] -[DOCUMENTING] foo v0.0.1 [..] -[FINISHED] [..] -", - ) - .run(); - assert!(p.root().join("target/doc").is_dir()); - assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); - assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); - // Check that turning the feature off will remove the files. - p.cargo("doc") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 [..] -[DOCUMENTING] bar v0.0.1 [..] -[DOCUMENTING] foo v0.0.1 [..] -[FINISHED] [..] -", - ) - .run(); - assert!(!p.root().join("target/doc/foo/fn.foo.html").is_file()); - assert!(!p.root().join("target/doc/bar/fn.bar.html").is_file()); - // And switching back will rebuild and bring them back. - p.cargo("doc --features foo") - .with_stderr( - "\ -[DOCUMENTING] bar v0.0.1 [..] -[DOCUMENTING] foo v0.0.1 [..] -[FINISHED] [..] 
-", - ) - .run(); - assert!(p.root().join("target/doc/foo/fn.foo.html").is_file()); - assert!(p.root().join("target/doc/bar/fn.bar.html").is_file()); -} - -#[cargo_test] -fn rerun_when_dir_removed() { - let p = project() - .file( - "src/lib.rs", - r#" - /// dox - pub fn foo() {} - "#, - ) - .build(); - - p.cargo("doc").run(); - assert!(p.root().join("target/doc/foo/index.html").is_file()); - - fs::remove_dir_all(p.root().join("target/doc/foo")).unwrap(); - - p.cargo("doc").run(); - assert!(p.root().join("target/doc/foo/index.html").is_file()); -} - -#[cargo_test] -fn document_only_lib() { - let p = project() - .file( - "src/lib.rs", - r#" - /// dox - pub fn foo() {} - "#, - ) - .file( - "src/bin/bar.rs", - r#" - /// ``` - /// โ˜ƒ - /// ``` - pub fn foo() {} - fn main() { foo(); } - "#, - ) - .build(); - p.cargo("doc --lib").run(); - assert!(p.root().join("target/doc/foo/index.html").is_file()); -} - -#[cargo_test] -fn plugins_no_use_target() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - proc-macro = true - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("doc --target=x86_64-unknown-openbsd -v").run(); -} - -#[cargo_test] -fn doc_all_workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = "bar" } - - [workspace] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - // The order in which bar is compiled or documented is not deterministic - p.cargo("doc --workspace") - .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") - .with_stderr_contains("[..] Checking bar v0.1.0 ([..])") - .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") - .run(); -} - -#[cargo_test] -fn doc_all_virtual_manifest() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - // The order in which bar and baz are documented is not guaranteed - p.cargo("doc --workspace") - .with_stderr_contains("[..] Documenting baz v0.1.0 ([..])") - .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") - .run(); -} - -#[cargo_test] -fn doc_virtual_manifest_all_implied() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - // The order in which bar and baz are documented is not guaranteed - p.cargo("doc") - .with_stderr_contains("[..] Documenting baz v0.1.0 ([..])") - .with_stderr_contains("[..] 
Documenting bar v0.1.0 ([..])") - .run(); -} - -#[cargo_test] -fn doc_virtual_manifest_one_project() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }") - .build(); - - p.cargo("doc -p bar") - .with_stderr_does_not_contain("[DOCUMENTING] baz v0.1.0 [..]") - .with_stderr( - "\ -[DOCUMENTING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn doc_virtual_manifest_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("doc -p '*z'") - .with_stderr_does_not_contain("[DOCUMENTING] bar v0.1.0 [..]") - .with_stderr( - "\ -[DOCUMENTING] baz v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn doc_all_member_dependency_same_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - Package::new("bar", "0.1.0").publish(); - - p.cargo("doc --workspace") - .with_stderr_unordered( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 (registry `dummy-registry`) -warning: output filename collision. -The lib target `bar` in package `bar v0.1.0` has the same output filename as \ -the lib target `bar` in package `bar v0.1.0 ([ROOT]/foo/bar)`. -Colliding filename is: [ROOT]/foo/target/doc/bar/index.html -The targets should have unique names. -This is a known bug where multiple crates with the same name use -the same path; see . -[DOCUMENTING] bar v0.1.0 -[CHECKING] bar v0.1.0 -[DOCUMENTING] bar v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn doc_workspace_open_help_message() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "bar"] - "#, - ) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .build(); - - // The order in which bar is compiled or documented is not deterministic - p.cargo("doc --workspace --open") - .env("BROWSER", tools::echo()) - .with_stderr_contains("[..] Documenting bar v0.1.0 ([..])") - .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") - .with_stderr_contains("[..] Opening [..]/bar/index.html") - .run(); -} - -#[cargo_test] -fn doc_extern_map_local() { - if !is_nightly() { - // -Zextern-html-root-url is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file(".cargo/config.toml", "doc.extern-map.std = 'local'") - .build(); - - p.cargo("doc -v --no-deps -Zrustdoc-map --open") - .env("BROWSER", tools::echo()) - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[DOCUMENTING] foo v0.1.0 [..] 
-[RUNNING] `rustdoc --crate-type lib --crate-name foo src/lib.rs [..]--crate-version 0.1.0` -[FINISHED] [..] - Opening [CWD]/target/doc/foo/index.html -", - ) - .run(); -} - -#[cargo_test] -fn open_no_doc_crate() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [lib] - doc = false - "#, - ) - .file("src/lib.rs", "#[cfg(feature)] pub fn f();") - .build(); - - p.cargo("doc --open") - .env("BROWSER", "do_not_run_me") - .with_status(101) - .with_stderr_contains("error: no crates with documentation") - .run(); -} - -#[cargo_test] -fn doc_workspace_open_different_library_and_package_names() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo"] - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [lib] - name = "foolib" - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("doc --open") - .env("BROWSER", tools::echo()) - .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") - .with_stderr_contains("[..] [CWD]/target/doc/foolib/index.html") - .with_stdout_contains("[CWD]/target/doc/foolib/index.html") - .run(); - - p.change_file( - ".cargo/config.toml", - &format!( - r#" - [doc] - browser = ["{}", "a"] - "#, - tools::echo().display().to_string().replace('\\', "\\\\") - ), - ); - - // check that the cargo config overrides the browser env var - p.cargo("doc --open") - .env("BROWSER", "do_not_run_me") - .with_stdout_contains("a [CWD]/target/doc/foolib/index.html") - .run(); -} - -#[cargo_test] -fn doc_workspace_open_binary() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo"] - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [[bin]] - name = "foobin" - path = "src/main.rs" - "#, - ) - .file("foo/src/main.rs", "") - .build(); - - p.cargo("doc --open") - .env("BROWSER", tools::echo()) - .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") - .with_stderr_contains("[..] Opening [CWD]/target/doc/foobin/index.html") - .run(); -} - -#[cargo_test] -fn doc_workspace_open_binary_and_library() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo"] - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [lib] - name = "foolib" - [[bin]] - name = "foobin" - path = "src/main.rs" - "#, - ) - .file("foo/src/lib.rs", "") - .file("foo/src/main.rs", "") - .build(); - - p.cargo("doc --open") - .env("BROWSER", tools::echo()) - .with_stderr_contains("[..] Documenting foo v0.1.0 ([..])") - .with_stderr_contains("[..] 
Opening [CWD]/target/doc/foolib/index.html") - .run(); -} - -#[cargo_test] -fn doc_edition() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - edition = "2018" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("doc -v") - .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") - .run(); - - p.cargo("test -v") - .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") - .run(); -} - -#[cargo_test] -fn doc_target_edition() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - edition = "2018" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("doc -v") - .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") - .run(); - - p.cargo("test -v") - .with_stderr_contains("[RUNNING] `rustdoc [..]--edition=2018[..]") - .run(); -} - -// Tests an issue where depending on different versions of the same crate depending on `cfg`s -// caused `cargo doc` to fail. -#[cargo_test] -fn issue_5345() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [target.'cfg(all(windows, target_arch = "x86"))'.dependencies] - bar = "0.1" - - [target.'cfg(not(all(windows, target_arch = "x86")))'.dependencies] - bar = "0.2" - "#, - ) - .file("src/lib.rs", "extern crate bar;") - .build(); - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.2.0").publish(); - - foo.cargo("build").run(); - foo.cargo("doc").run(); -} - -#[cargo_test] -fn doc_private_items() { - let foo = project() - .file("src/lib.rs", "mod private { fn private_item() {} }") - .build(); - foo.cargo("doc --document-private-items").run(); - - assert!(foo.root().join("target/doc").is_dir()); - assert!(foo - .root() - .join("target/doc/foo/private/index.html") - .is_file()); -} - -#[cargo_test] -fn doc_private_ws() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", "fn p() {}") - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/lib.rs", "fn p2() {}") - .file("b/src/bin/b-cli.rs", "fn main() {}") - .build(); - p.cargo("doc --workspace --bins --lib --document-private-items -v") - .with_stderr_contains( - "[RUNNING] `rustdoc [..] a/src/lib.rs [..]--document-private-items[..]", - ) - .with_stderr_contains( - "[RUNNING] `rustdoc [..] b/src/lib.rs [..]--document-private-items[..]", - ) - .with_stderr_contains( - "[RUNNING] `rustdoc [..] b/src/bin/b-cli.rs [..]--document-private-items[..]", - ) - .run(); -} - -const BAD_INTRA_LINK_LIB: &str = r#" -#![deny(broken_intra_doc_links)] - -/// [bad_link] -pub fn foo() {} -"#; - -#[cargo_test] -fn doc_cap_lints() { - let a = git::new("a", |p| { - p.file("Cargo.toml", &basic_lib_manifest("a")) - .file("src/lib.rs", BAD_INTRA_LINK_LIB) - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = {{ git = '{}' }} - "#, - a.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("doc") - .with_stderr_unordered( - "\ -[UPDATING] git repository `[..]` -[DOCUMENTING] a v0.5.0 ([..]) -[CHECKING] a v0.5.0 ([..]) -[DOCUMENTING] foo v0.0.1 ([..]) -[FINISHED] dev [..] 
-", - ) - .run(); - - p.root().join("target").rm_rf(); - - p.cargo("doc -vv") - .with_stderr_contains("[WARNING] [..]`bad_link`[..]") - .run(); -} - -#[cargo_test] -fn doc_message_format() { - let p = project().file("src/lib.rs", BAD_INTRA_LINK_LIB).build(); - - p.cargo("doc --message-format=json") - .with_status(101) - .with_json_contains_unordered( - r#" - { - "message": { - "children": "{...}", - "code": "{...}", - "level": "error", - "message": "{...}", - "rendered": "{...}", - "spans": "{...}" - }, - "package_id": "foo [..]", - "manifest_path": "[..]", - "reason": "compiler-message", - "target": "{...}" - } - "#, - ) - .run(); -} - -#[cargo_test] -fn doc_json_artifacts() { - // Checks the output of json artifact messages. - let p = project() - .file("src/lib.rs", "") - .file("src/bin/somebin.rs", "fn main() {}") - .build(); - - p.cargo("doc --message-format=json") - .with_json_contains_unordered( - r#" -{ - "reason": "compiler-artifact", - "package_id": "foo 0.0.1 [..]", - "manifest_path": "[ROOT]/foo/Cargo.toml", - "target": - { - "kind": ["lib"], - "crate_types": ["lib"], - "name": "foo", - "src_path": "[ROOT]/foo/src/lib.rs", - "edition": "2015", - "doc": true, - "doctest": true, - "test": true - }, - "profile": "{...}", - "features": [], - "filenames": ["[ROOT]/foo/target/debug/deps/libfoo-[..].rmeta"], - "executable": null, - "fresh": false -} - -{ - "reason": "compiler-artifact", - "package_id": "foo 0.0.1 [..]", - "manifest_path": "[ROOT]/foo/Cargo.toml", - "target": - { - "kind": ["lib"], - "crate_types": ["lib"], - "name": "foo", - "src_path": "[ROOT]/foo/src/lib.rs", - "edition": "2015", - "doc": true, - "doctest": true, - "test": true - }, - "profile": "{...}", - "features": [], - "filenames": ["[ROOT]/foo/target/doc/foo/index.html"], - "executable": null, - "fresh": false -} - -{ - "reason": "compiler-artifact", - "package_id": "foo 0.0.1 [..]", - "manifest_path": "[ROOT]/foo/Cargo.toml", - "target": - { - "kind": ["bin"], - "crate_types": ["bin"], - "name": "somebin", - "src_path": "[ROOT]/foo/src/bin/somebin.rs", - "edition": "2015", - "doc": true, - "doctest": false, - "test": true - }, - "profile": "{...}", - "features": [], - "filenames": ["[ROOT]/foo/target/doc/somebin/index.html"], - "executable": null, - "fresh": false -} - -{"reason":"build-finished","success":true} -"#, - ) - .run(); -} - -#[cargo_test] -fn short_message_format() { - let p = project().file("src/lib.rs", BAD_INTRA_LINK_LIB).build(); - p.cargo("doc --message-format=short") - .with_status(101) - .with_stderr_contains("src/lib.rs:4:6: error: [..]`bad_link`[..]") - .run(); -} - -#[cargo_test] -fn doc_example() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [[example]] - crate-type = ["lib"] - name = "ex1" - doc = true - "#, - ) - .file("src/lib.rs", "pub fn f() {}") - .file( - "examples/ex1.rs", - r#" - use foo::f; - - /// Example - pub fn x() { f(); } - "#, - ) - .build(); - - p.cargo("doc").run(); - assert!(p - .build_dir() - .join("doc") - .join("ex1") - .join("fn.x.html") - .exists()); -} - -#[cargo_test] -fn bin_private_items() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#, - ) - .file( - "src/main.rs", - " - pub fn foo_pub() {} - fn foo_priv() {} - struct FooStruct; - enum FooEnum {} - trait FooTrait {} - type FooType = u32; - mod foo_mod {} - - ", - ) - .build(); - - p.cargo("doc") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] 
dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - assert!(p.root().join("target/doc/foo/index.html").is_file()); - assert!(p.root().join("target/doc/foo/fn.foo_pub.html").is_file()); - assert!(p.root().join("target/doc/foo/fn.foo_priv.html").is_file()); - assert!(p - .root() - .join("target/doc/foo/struct.FooStruct.html") - .is_file()); - assert!(p.root().join("target/doc/foo/enum.FooEnum.html").is_file()); - assert!(p - .root() - .join("target/doc/foo/trait.FooTrait.html") - .is_file()); - assert!(p.root().join("target/doc/foo/type.FooType.html").is_file()); - assert!(p.root().join("target/doc/foo/foo_mod/index.html").is_file()); -} - -#[cargo_test] -fn bin_private_items_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file( - "src/main.rs", - " - fn foo_priv() {} - pub fn foo_pub() {} - ", - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file( - "bar/src/lib.rs", - " - #[allow(dead_code)] - fn bar_priv() {} - pub fn bar_pub() {} - ", - ) - .build(); - - p.cargo("doc") - .with_stderr_unordered( - "\ -[DOCUMENTING] bar v0.0.1 ([..]) -[CHECKING] bar v0.0.1 ([..]) -[DOCUMENTING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - assert!(p.root().join("target/doc/foo/index.html").is_file()); - assert!(p.root().join("target/doc/foo/fn.foo_pub.html").is_file()); - assert!(p.root().join("target/doc/foo/fn.foo_priv.html").is_file()); - - assert!(p.root().join("target/doc/bar/index.html").is_file()); - assert!(p.root().join("target/doc/bar/fn.bar_pub.html").is_file()); - assert!(!p.root().join("target/doc/bar/fn.bar_priv.html").exists()); -} - -#[cargo_test] -fn crate_versions() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.2.4" - authors = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("doc -v") - .with_stderr( - "\ -[DOCUMENTING] foo v1.2.4 [..] -[RUNNING] `rustdoc --crate-type lib --crate-name foo src/lib.rs [..]--crate-version 1.2.4` -[FINISHED] [..] -", - ) - .run(); - - let output_path = p.root().join("target/doc/foo/index.html"); - let output_documentation = fs::read_to_string(&output_path).unwrap(); - - assert!(output_documentation.contains("Version 1.2.4")); -} - -#[cargo_test] -fn crate_versions_flag_is_overridden() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.2.4" - authors = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - - let output_documentation = || { - let output_path = p.root().join("target/doc/foo/index.html"); - fs::read_to_string(&output_path).unwrap() - }; - let asserts = |html: String| { - assert!(!html.contains("1.2.4")); - assert!(html.contains("Version 2.0.3")); - }; - - p.cargo("doc") - .env("RUSTDOCFLAGS", "--crate-version 2.0.3") - .run(); - asserts(output_documentation()); - - p.build_dir().rm_rf(); - - p.cargo("rustdoc -- --crate-version 2.0.3").run(); - asserts(output_documentation()); -} - -#[cargo_test] -fn doc_test_in_workspace() { - if !is_nightly() { - // -Zdoctest-in-workspace is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = [ - "crate-a", - "crate-b", - ] - "#, - ) - .file( - "crate-a/Cargo.toml", - r#" - [project] - name = "crate-a" - version = "0.1.0" - "#, - ) - .file( - "crate-a/src/lib.rs", - "\ - //! ``` - //! assert_eq!(1, 1); - //! 
``` - ", - ) - .file( - "crate-b/Cargo.toml", - r#" - [project] - name = "crate-b" - version = "0.1.0" - "#, - ) - .file( - "crate-b/src/lib.rs", - "\ - //! ``` - //! assert_eq!(1, 1); - //! ``` - ", - ) - .build(); - p.cargo("test -Zdoctest-in-workspace --doc -vv") - .masquerade_as_nightly_cargo() - .with_stderr_contains("[DOCTEST] crate-a") - .with_stdout_contains( - " -running 1 test -test crate-a/src/lib.rs - (line 1) ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] - -", - ) - .with_stderr_contains("[DOCTEST] crate-b") - .with_stdout_contains( - " -running 1 test -test crate-b/src/lib.rs - (line 1) ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] - -", - ) - .run(); -} - -#[cargo_test] -fn doc_fingerprint_is_versioning_consistent() { - // Random rustc verbose version - let old_rustc_verbose_version = format!( - "\ -rustc 1.41.1 (f3e1a954d 2020-02-24) -binary: rustc -commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196 -commit-date: 2020-02-24 -host: {} -release: 1.41.1 -LLVM version: 9.0 -", - rustc_host() - ); - - // Create the dummy project. - let dummy_project = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.2.4" - authors = [] - "#, - ) - .file("src/lib.rs", "//! These are the docs!") - .build(); - - dummy_project.cargo("doc").run(); - - let fingerprint: RustDocFingerprint = - serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) - .expect("JSON Serde fail"); - - // Check that the fingerprint contains the actual rustc version - // which has been used to compile the docs. - let output = std::process::Command::new("rustc") - .arg("-vV") - .output() - .expect("Failed to get actual rustc verbose version"); - assert_eq!( - fingerprint.rustc_vv, - (String::from_utf8_lossy(&output.stdout).as_ref()) - ); - - // As the test shows above. Now we have generated the `doc/` folder and inside - // the rustdoc fingerprint file is located with the correct rustc version. - // So we will remove it and create a new fingerprint with an old rustc version - // inside it. We will also place a bogus file inside of the `doc/` folder to ensure - // it gets removed as we expect on the next doc compilation. - dummy_project.change_file( - "target/.rustdoc_fingerprint.json", - &old_rustc_verbose_version, - ); - - fs::write( - dummy_project.build_dir().join("doc/bogus_file"), - String::from("This is a bogus file and should be removed!"), - ) - .expect("Error writing test bogus file"); - - // Now if we trigger another compilation, since the fingerprint contains an old version - // of rustc, cargo should remove the entire `/doc` folder (including the fingerprint) - // and generating another one with the actual version. - // It should also remove the bogus file we created above. - dummy_project.cargo("doc").run(); - - assert!(!dummy_project.build_dir().join("doc/bogus_file").exists()); - - let fingerprint: RustDocFingerprint = - serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) - .expect("JSON Serde fail"); - - // Check that the fingerprint contains the actual rustc version - // which has been used to compile the docs. 
- assert_eq!( - fingerprint.rustc_vv, - (String::from_utf8_lossy(&output.stdout).as_ref()) - ); -} - -#[cargo_test] -fn doc_fingerprint_respects_target_paths() { - // Random rustc verbose version - let old_rustc_verbose_version = format!( - "\ -rustc 1.41.1 (f3e1a954d 2020-02-24) -binary: rustc -commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196 -commit-date: 2020-02-24 -host: {} -release: 1.41.1 -LLVM version: 9.0 -", - rustc_host() - ); - - // Create the dummy project. - let dummy_project = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.2.4" - authors = [] - "#, - ) - .file("src/lib.rs", "//! These are the docs!") - .build(); - - dummy_project.cargo("doc --target").arg(rustc_host()).run(); - - let fingerprint: RustDocFingerprint = - serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) - .expect("JSON Serde fail"); - - // Check that the fingerprint contains the actual rustc version - // which has been used to compile the docs. - let output = std::process::Command::new("rustc") - .arg("-vV") - .output() - .expect("Failed to get actual rustc verbose version"); - assert_eq!( - fingerprint.rustc_vv, - (String::from_utf8_lossy(&output.stdout).as_ref()) - ); - - // As the test shows above. Now we have generated the `doc/` folder and inside - // the rustdoc fingerprint file is located with the correct rustc version. - // So we will remove it and create a new fingerprint with an old rustc version - // inside it. We will also place a bogus file inside of the `doc/` folder to ensure - // it gets removed as we expect on the next doc compilation. - dummy_project.change_file( - "target/.rustdoc_fingerprint.json", - &old_rustc_verbose_version, - ); - - fs::write( - dummy_project - .build_dir() - .join(rustc_host()) - .join("doc/bogus_file"), - String::from("This is a bogus file and should be removed!"), - ) - .expect("Error writing test bogus file"); - - // Now if we trigger another compilation, since the fingerprint contains an old version - // of rustc, cargo should remove the entire `/doc` folder (including the fingerprint) - // and generating another one with the actual version. - // It should also remove the bogus file we created above. - dummy_project.cargo("doc --target").arg(rustc_host()).run(); - - assert!(!dummy_project - .build_dir() - .join(rustc_host()) - .join("doc/bogus_file") - .exists()); - - let fingerprint: RustDocFingerprint = - serde_json::from_str(&dummy_project.read_file("target/.rustdoc_fingerprint.json")) - .expect("JSON Serde fail"); - - // Check that the fingerprint contains the actual rustc version - // which has been used to compile the docs. - assert_eq!( - fingerprint.rustc_vv, - (String::from_utf8_lossy(&output.stdout).as_ref()) - ); -} - -#[cargo_test] -fn doc_fingerprint_unusual_behavior() { - // Checks for some unusual circumstances with clearing the doc directory. - if !symlink_supported() { - return; - } - let p = project().file("src/lib.rs", "").build(); - p.build_dir().mkdir_p(); - let real_doc = p.root().join("doc"); - real_doc.mkdir_p(); - let build_doc = p.build_dir().join("doc"); - p.symlink(&real_doc, &build_doc); - fs::write(real_doc.join("somefile"), "test").unwrap(); - fs::write(real_doc.join(".hidden"), "test").unwrap(); - p.cargo("doc").run(); - // Make sure for the first run, it does not delete any files and does not - // break the symlink. 
- assert!(build_doc.join("somefile").exists()); - assert!(real_doc.join("somefile").exists()); - assert!(real_doc.join(".hidden").exists()); - assert!(real_doc.join("foo/index.html").exists()); - // Pretend that the last build was generated by an older version. - p.change_file( - "target/.rustdoc_fingerprint.json", - "{\"rustc_vv\": \"I am old\"}", - ); - // Change file to trigger a new build. - p.change_file("src/lib.rs", "// changed"); - p.cargo("doc") - .with_stderr( - "[DOCUMENTING] foo [..]\n\ - [FINISHED] [..]", - ) - .run(); - // This will delete somefile, but not .hidden. - assert!(!real_doc.join("somefile").exists()); - assert!(real_doc.join(".hidden").exists()); - assert!(real_doc.join("foo/index.html").exists()); - // And also check the -Z flag behavior. - p.change_file( - "target/.rustdoc_fingerprint.json", - "{\"rustc_vv\": \"I am old\"}", - ); - // Change file to trigger a new build. - p.change_file("src/lib.rs", "// changed2"); - fs::write(real_doc.join("somefile"), "test").unwrap(); - p.cargo("doc -Z skip-rustdoc-fingerprint") - .masquerade_as_nightly_cargo() - .with_stderr( - "[DOCUMENTING] foo [..]\n\ - [FINISHED] [..]", - ) - .run(); - // Should not have deleted anything. - assert!(build_doc.join("somefile").exists()); - assert!(real_doc.join("somefile").exists()); -} - -#[cargo_test] -fn scrape_examples_basic() { - if !is_nightly() { - // -Z rustdoc-scrape-examples is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#, - ) - .file("examples/ex.rs", "fn main() { foo::foo(); }") - .file("src/lib.rs", "pub fn foo() {}\npub fn bar() { foo(); }") - .build(); - - p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples=all") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[..] foo v0.0.1 ([CWD]) -[..] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - - let doc_html = p.read_file("target/doc/foo/fn.foo.html"); - assert!(doc_html.contains("Examples found in repository")); - assert!(doc_html.contains("More examples")); - - // Ensure that the reverse-dependency has its sources generated - assert!(p.build_dir().join("doc/src/ex/ex.rs.html").exists()); -} - -#[cargo_test] -fn scrape_examples_avoid_build_script_cycle() { - if !is_nightly() { - // -Z rustdoc-scrape-examples is unstable - return; - } - - let p = project() - // package with build dependency - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - links = "foo" - - [workspace] - members = ["bar"] - - [build-dependencies] - bar = {path = "bar"} - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main(){}") - // dependency - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - links = "bar" - "#, - ) - .file("bar/src/lib.rs", "") - .file("bar/build.rs", "fn main(){}") - .build(); - - p.cargo("doc --all -Zunstable-options -Z rustdoc-scrape-examples=all") - .masquerade_as_nightly_cargo() - .run(); -} - -#[cargo_test] -fn scrape_examples_complex_reverse_dependencies() { - if !is_nightly() { - // -Z rustdoc-scrape-examples is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies] - a = {path = "a", features = ["feature"]} - b = {path = "b"} - - [workspace] - members = ["b"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/ex.rs", "fn main() { a::f(); }") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [lib] - proc-macro = true - - [dependencies] - b = {path = "../b"} - - [features] - feature = [] - "#, - ) - .file("a/src/lib.rs", "#[cfg(feature)] pub fn f();") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.0.1" - authors = [] - "#, - ) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples=all") - .masquerade_as_nightly_cargo() - .run(); -} - -#[cargo_test] -fn scrape_examples_crate_with_dash() { - if !is_nightly() { - // -Z rustdoc-scrape-examples is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "da-sh" - version = "0.0.1" - authors = [] - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file("examples/a.rs", "fn main() { da_sh::foo(); }") - .build(); - - p.cargo("doc -Zunstable-options -Z rustdoc-scrape-examples=all") - .masquerade_as_nightly_cargo() - .run(); - - let doc_html = p.read_file("target/doc/da_sh/fn.foo.html"); - assert!(doc_html.contains("Examples found in repository")); -} - -#[cargo_test] -fn scrape_examples_missing_flag() { - if !is_nightly() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.2.4" - authors = [] - "#, - ) - .file("src/lib.rs", "//! These are the docs!") - .build(); - p.cargo("doc -Zrustdoc-scrape-examples") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr("error: -Z rustdoc-scrape-examples must take [..] an argument") - .run(); -} - -#[cargo_test] -fn lib_before_bin() { - // Checks that the library is documented before the binary. - // Previously they were built concurrently, which can cause issues - // if the bin has intra-doc links to the lib. - let p = project() - .file( - "src/lib.rs", - r#" - /// Hi - pub fn abc() {} - "#, - ) - .file( - "src/bin/somebin.rs", - r#" - //! 
See [`foo::abc`] - fn main() {} - "#, - ) - .build(); - - // Run check first. This just helps ensure that the test clearly shows the - // order of the rustdoc commands. - p.cargo("check").run(); - - // The order of output here should be deterministic. - p.cargo("doc -v") - .with_stderr( - "\ -[DOCUMENTING] foo [..] -[RUNNING] `rustdoc --crate-type lib --crate-name foo src/lib.rs [..] -[RUNNING] `rustdoc --crate-type bin --crate-name somebin src/bin/somebin.rs [..] -[FINISHED] [..] -", - ) - .run(); - - // And the link should exist. - let bin_html = p.read_file("target/doc/somebin/index.html"); - assert!(bin_html.contains("../foo/fn.abc.html")); -} - -#[cargo_test] -fn doc_lib_false() { - // doc = false for a library - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [lib] - doc = false - - [dependencies] - bar = {path = "bar"} - "#, - ) - .file("src/lib.rs", "extern crate bar;") - .file("src/bin/some-bin.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [lib] - doc = false - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("doc") - .with_stderr( - "\ -[CHECKING] bar v0.1.0 [..] -[CHECKING] foo v0.1.0 [..] -[DOCUMENTING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - - assert!(!p.build_dir().join("doc/foo").exists()); - assert!(!p.build_dir().join("doc/bar").exists()); - assert!(p.build_dir().join("doc/some_bin").exists()); -} - -#[cargo_test] -fn doc_lib_false_dep() { - // doc = false for a dependency - // Ensures that the rmeta gets produced - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "extern crate bar;") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [lib] - doc = false - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("doc") - .with_stderr( - "\ -[CHECKING] bar v0.1.0 [..] -[DOCUMENTING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - - assert!(p.build_dir().join("doc/foo").exists()); - assert!(!p.build_dir().join("doc/bar").exists()); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/edition.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/edition.rs deleted file mode 100644 index 01ba02635..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/edition.rs +++ /dev/null @@ -1,128 +0,0 @@ -//! Tests for edition setting. - -use cargo::core::Edition; -use cargo_test_support::{basic_lib_manifest, is_nightly, project}; - -#[cargo_test] -fn edition_works_for_build_script() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = 'foo' - version = '0.1.0' - edition = '2018' - - [build-dependencies] - a = { path = 'a' } - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { - a::foo(); - } - "#, - ) - .file("a/Cargo.toml", &basic_lib_manifest("a")) - .file("a/src/lib.rs", "pub fn foo() {}") - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn edition_unstable_gated() { - // During the period where a new edition is coming up, but not yet stable, - // this test will verify that it cannot be used on stable. If there is no - // next edition, it does nothing. 
- let next = match Edition::LATEST_UNSTABLE { - Some(next) => next, - None => { - eprintln!("Next edition is currently not available, skipping test."); - return; - } - }; - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "{}" - "#, - next - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr(&format!( - "\ -[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` - -Caused by: - feature `edition{next}` is required - - The package requires the Cargo feature called `edition{next}`, \ - but that feature is not stabilized in this version of Cargo (1.[..]). - Consider trying a newer version of Cargo (this may require the nightly release). - See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#edition-{next} \ - for more information about the status of this feature. -", - next = next - )) - .run(); -} - -#[cargo_test] -fn edition_unstable() { - // During the period where a new edition is coming up, but not yet stable, - // this test will verify that it can be used with `cargo-features`. If - // there is no next edition, it does nothing. - if !is_nightly() { - // This test is fundamentally always nightly. - return; - } - let next = match Edition::LATEST_UNSTABLE { - Some(next) => next, - None => { - eprintln!("Next edition is currently not available, skipping test."); - return; - } - }; - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - cargo-features = ["edition{next}"] - - [package] - name = "foo" - version = "0.1.0" - edition = "{next}" - "#, - next = next - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[CHECKING] foo [..] -[FINISHED] [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/error.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/error.rs deleted file mode 100644 index 410902c21..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/error.rs +++ /dev/null @@ -1,19 +0,0 @@ -//! General error tests that don't belong anywhere else. - -use cargo_test_support::cargo_process; - -#[cargo_test] -fn internal_error() { - cargo_process("init") - .env("__CARGO_TEST_INTERNAL_ERROR", "1") - .with_status(101) - .with_stderr( - "\ -[ERROR] internal error test -[NOTE] this is an unexpected cargo internal error -[NOTE] we would appreciate a bug report: https://github.com/rust-lang/cargo/issues/ -[NOTE] cargo [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/features.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/features.rs deleted file mode 100644 index 49a61301b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/features.rs +++ /dev/null @@ -1,2058 +0,0 @@ -//! Tests for `[features]` table. 
- -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::{Dependency, Package}; -use cargo_test_support::{basic_manifest, project}; - -#[cargo_test] -fn invalid1() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - bar = ["baz"] - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - feature `bar` includes `baz` which is neither a dependency nor another feature -", - ) - .run(); -} - -#[cargo_test] -fn invalid2() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - bar = ["baz"] - baz = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - features and dependencies cannot have the same name: `bar` -", - ) - .run(); -} - -#[cargo_test] -fn invalid3() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - bar = ["baz"] - - [dependencies.baz] - path = "foo" - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - feature `bar` includes `baz`, but `baz` is not an optional dependency - A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition. -", - ) - .run(); -} - -#[cargo_test] -fn invalid4() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - features = ["bar"] - "#, - ) - .file("src/main.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to select a version for `bar`. - ... required by package `foo v0.0.1 ([..])` -versions that meet the requirements `*` are: 0.0.1 - -the package `foo` depends on `bar`, with features: `bar` but `bar` does not have these features. 
- - -failed to select a version for `bar` which could resolve this conflict", - ) - .run(); - - p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1")); - - p.cargo("build --features test") - .with_status(101) - .with_stderr("error: Package `foo v0.0.1 ([..])` does not have the feature `test`") - .run(); -} - -#[cargo_test] -fn invalid5() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies.bar] - path = "bar" - optional = true - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - dev-dependencies are not allowed to be optional: `bar` -", - ) - .run(); -} - -#[cargo_test] -fn invalid6() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - foo = ["bar/baz"] - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build --features foo") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - feature `foo` includes `bar/baz`, but `bar` is not a dependency -", - ) - .run(); -} - -#[cargo_test] -fn invalid7() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - foo = ["bar/baz"] - bar = [] - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build --features foo") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - feature `foo` includes `bar/baz`, but `bar` is not a dependency -", - ) - .run(); -} - -#[cargo_test] -fn invalid8() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - features = ["foo/bar"] - "#, - ) - .file("src/main.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build --features foo") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[CWD]/Cargo.toml` - -Caused by: - feature `foo/bar` in dependency `bar` is not allowed to contain slashes - If you want to enable features [..] -", - ) - .run(); -} - -#[cargo_test] -fn invalid9() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build --features bar") - .with_stderr( - "\ -error: Package `foo v0.0.1 ([..])` does not have feature `bar`. It has a required dependency with that name, but only optional dependencies can be used as features. -", - ).with_status(101).run(); -} - -#[cargo_test] -fn invalid10() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - features = ["baz"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies.baz] - path = "baz" - "#, - ) - .file("bar/src/lib.rs", "") - .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) - .file("bar/baz/src/lib.rs", "") - .build(); - - p.cargo("build").with_stderr("\ -error: failed to select a version for `bar`. - ... 
required by package `foo v0.0.1 ([..])` -versions that meet the requirements `*` are: 0.0.1 - -the package `foo` depends on `bar`, with features: `baz` but `bar` does not have these features. - It has a required dependency with that name, but only optional dependencies can be used as features. - - -failed to select a version for `bar` which could resolve this conflict -").with_status(101) - .run(); -} - -#[cargo_test] -fn no_transitive_dep_feature_requirement() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.derived] - path = "derived" - - [features] - default = ["derived/bar/qux"] - "#, - ) - .file( - "src/main.rs", - r#" - extern crate derived; - fn main() { derived::test(); } - "#, - ) - .file( - "derived/Cargo.toml", - r#" - [package] - name = "derived" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file("derived/src/lib.rs", "extern crate bar; pub use bar::test;") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [features] - qux = [] - "#, - ) - .file( - "bar/src/lib.rs", - r#" - #[cfg(feature = "qux")] - pub fn test() { print!("test"); } - "#, - ) - .build(); - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[CWD]/Cargo.toml` - -Caused by: - multiple slashes in feature `derived/bar/qux` (included by feature `default`) are not allowed -", - ) - .run(); -} - -#[cargo_test] -fn no_feature_doesnt_build() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - optional = true - "#, - ) - .file( - "src/main.rs", - r#" - #[cfg(feature = "bar")] - extern crate bar; - #[cfg(feature = "bar")] - fn main() { bar::bar(); println!("bar") } - #[cfg(not(feature = "bar"))] - fn main() {} - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.process(&p.bin("foo")).with_stdout("").run(); - - p.cargo("build --features bar") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.process(&p.bin("foo")).with_stdout("bar\n").run(); -} - -#[cargo_test] -fn default_feature_pulled_in() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["bar"] - - [dependencies.bar] - path = "bar" - optional = true - "#, - ) - .file( - "src/main.rs", - r#" - #[cfg(feature = "bar")] - extern crate bar; - #[cfg(feature = "bar")] - fn main() { bar::bar(); println!("bar") } - #[cfg(not(feature = "bar"))] - fn main() {} - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.process(&p.bin("foo")).with_stdout("bar\n").run(); - - p.cargo("build --no-default-features") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - p.process(&p.bin("foo")).with_stdout("").run(); -} - -#[cargo_test] -fn cyclic_feature() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["default"] - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr("[ERROR] cyclic feature dependency: feature `default` depends on itself") - .run(); -} - -#[cargo_test] -fn cyclic_feature2() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - foo = ["bar"] - bar = ["foo"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn groups_on_groups_on_groups() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["f1"] - f1 = ["f2", "bar"] - f2 = ["f3", "f4"] - f3 = ["f5", "f6", "baz"] - f4 = ["f5", "f7"] - f5 = ["f6"] - f6 = ["f7"] - f7 = ["bar"] - - [dependencies.bar] - path = "bar" - optional = true - - [dependencies.baz] - path = "baz" - optional = true - "#, - ) - .file( - "src/main.rs", - r#" - #[allow(unused_extern_crates)] - extern crate bar; - #[allow(unused_extern_crates)] - extern crate baz; - fn main() {} - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) -[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn many_cli_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - optional = true - - [dependencies.baz] - path = "baz" - optional = true - "#, - ) - .file( - "src/main.rs", - r#" - #[allow(unused_extern_crates)] - extern crate bar; - #[allow(unused_extern_crates)] - extern crate baz; - fn main() {} - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("build --features") - .arg("bar baz") - .with_stderr( - "\ -[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) -[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn union_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.d1] - path = "d1" - features = ["f1"] - [dependencies.d2] - path = "d2" - features = ["f2"] - "#, - ) - .file( - "src/main.rs", - r#" - #[allow(unused_extern_crates)] - extern crate d1; - extern crate d2; - fn main() { - d2::f1(); - d2::f2(); - } - "#, - ) - .file( - "d1/Cargo.toml", - r#" - [package] - name = "d1" - version = "0.0.1" - authors = [] - - [features] - f1 = ["d2"] - - [dependencies.d2] - path = "../d2" - features = ["f1"] - optional = true - "#, - ) - .file("d1/src/lib.rs", "") - .file( - "d2/Cargo.toml", - r#" - [package] - name = "d2" - version = "0.0.1" - authors = [] - - [features] - f1 = [] - f2 = [] - "#, - ) - .file( - "d2/src/lib.rs", - r#" - #[cfg(feature = "f1")] pub fn f1() {} - #[cfg(feature = "f2")] pub fn f2() {} - "#, - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] d2 v0.0.1 ([CWD]/d2) -[COMPILING] d1 v0.0.1 ([CWD]/d1) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn many_features_no_rebuilds() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - authors = [] - - [dependencies.a] - path = "a" - features = ["fall"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - authors = [] - - [features] - ftest = [] - ftest2 = [] - fall = ["ftest", "ftest2"] - "#, - ) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] a v0.1.0 ([CWD]/a) -[COMPILING] b v0.1.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.root().move_into_the_past(); - - p.cargo("build -v") - .with_stderr( - "\ -[FRESH] a v0.1.0 ([..]/a) -[FRESH] b v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -// Tests that all cmd lines work with `--features ""` -#[cargo_test] -fn empty_features() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - p.cargo("build --features").arg("").run(); -} - -// Tests that all cmd lines work with `--features ""` -#[cargo_test] -fn transitive_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - foo = ["bar/baz"] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() { bar::baz(); }") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [features] - baz = [] - "#, - ) - .file( - "bar/src/lib.rs", - r#"#[cfg(feature = "baz")] pub fn baz() {}"#, - ) - .build(); - - p.cargo("build --features foo").run(); -} - -#[cargo_test] -fn everything_in_the_lockfile() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - f1 = ["d1/f1"] - f2 = ["d2"] - - [dependencies.d1] - path = "d1" - [dependencies.d2] - path = "d2" - optional = true - [dependencies.d3] - path = "d3" - optional = true - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "d1/Cargo.toml", - r#" - [package] - name = "d1" - version = "0.0.1" - authors = [] - - [features] - f1 = [] - "#, - ) - .file("d1/src/lib.rs", "") - .file("d2/Cargo.toml", &basic_manifest("d2", "0.0.2")) - .file("d2/src/lib.rs", "") - .file( - "d3/Cargo.toml", - r#" - [package] - name = "d3" - version = "0.0.3" - authors = [] - - [features] - f3 = [] - "#, - ) - .file("d3/src/lib.rs", "") - .build(); - - p.cargo("fetch").run(); - let lockfile = p.read_lockfile(); - assert!( - lockfile.contains(r#"name = "d1""#), - "d1 not found\n{}", - lockfile - ); - assert!( - lockfile.contains(r#"name = "d2""#), - "d2 not found\n{}", - lockfile - ); - assert!( - lockfile.contains(r#"name = "d3""#), - "d3 not found\n{}", - lockfile - ); -} - -#[cargo_test] -fn no_rebuild_when_frobbing_default_feature() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - a = { path = "a" } - b = { path = "b" } - "#, - ) - .file("src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - authors = [] - - [dependencies] - a = { path = "../a", features = ["f1"], default-features = false } - "#, - ) - .file("b/src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - authors = [] - - [features] - default = ["f1"] - f1 = [] - "#, - ) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("build").with_stdout("").run(); - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn unions_work_with_no_default_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - a = { path = "a" } - b = { path = "b" } - "#, - ) - .file("src/lib.rs", "extern crate a; pub fn foo() { a::a(); }") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - authors = [] - - [dependencies] - a = { path = "../a", features = [], default-features = false } - "#, - ) - .file("b/src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - authors = [] - - [features] - default = ["f1"] - f1 = [] - "#, - ) - .file("a/src/lib.rs", r#"#[cfg(feature = "f1")] pub fn a() {}"#) 
- .build(); - - p.cargo("build").run(); - p.cargo("build").with_stdout("").run(); - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn optional_and_dev_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.1.0" - authors = [] - - [dependencies] - foo = { path = "foo", optional = true } - [dev-dependencies] - foo = { path = "foo" } - "#, - ) - .file("src/lib.rs", "") - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] test v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn activating_feature_activates_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.1.0" - authors = [] - - [dependencies] - foo = { path = "foo", optional = true } - - [features] - a = ["foo/a"] - "#, - ) - .file( - "src/lib.rs", - "extern crate foo; pub fn bar() { foo::bar(); }", - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [features] - a = [] - "#, - ) - .file("foo/src/lib.rs", r#"#[cfg(feature = "a")] pub fn bar() {}"#) - .build(); - - p.cargo("build --features a -v").run(); -} - -#[cargo_test] -fn dep_feature_in_cmd_line() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.derived] - path = "derived" - "#, - ) - .file( - "src/main.rs", - r#" - extern crate derived; - fn main() { derived::test(); } - "#, - ) - .file( - "derived/Cargo.toml", - r#" - [package] - name = "derived" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - - [features] - default = [] - derived-feat = ["bar/some-feat"] - "#, - ) - .file("derived/src/lib.rs", "extern crate bar; pub use bar::test;") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [features] - some-feat = [] - "#, - ) - .file( - "bar/src/lib.rs", - r#" - #[cfg(feature = "some-feat")] - pub fn test() { print!("test"); } - "#, - ) - .build(); - - // The foo project requires that feature "some-feat" in "bar" is enabled. - // Building without any features enabled should fail: - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..]unresolved import `bar::test`") - .run(); - - // We should be able to enable the feature "derived-feat", which enables "some-feat", - // on the command line. 
The feature is enabled, thus building should be successful: - p.cargo("build --features derived/derived-feat").run(); - - // Trying to enable features of transitive dependencies is an error - p.cargo("build --features bar/some-feat") - .with_status(101) - .with_stderr("error: package `foo v0.0.1 ([..])` does not have a dependency named `bar`") - .run(); - - // Hierarchical feature specification should still be disallowed - p.cargo("build --features derived/bar/some-feat") - .with_status(101) - .with_stderr("[ERROR] multiple slashes in feature `derived/bar/some-feat` is not allowed") - .run(); -} - -#[cargo_test] -fn all_features_flag_enables_all_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - foo = [] - bar = [] - - [dependencies.baz] - path = "baz" - optional = true - "#, - ) - .file( - "src/main.rs", - r#" - #[cfg(feature = "foo")] - pub fn foo() {} - - #[cfg(feature = "bar")] - pub fn bar() { - extern crate baz; - baz::baz(); - } - - fn main() { - foo(); - bar(); - } - "#, - ) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("build --all-features").run(); -} - -#[cargo_test] -fn many_cli_features_comma_delimited() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - optional = true - - [dependencies.baz] - path = "baz" - optional = true - "#, - ) - .file( - "src/main.rs", - r#" - #[allow(unused_extern_crates)] - extern crate bar; - #[allow(unused_extern_crates)] - extern crate baz; - fn main() {} - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("build --features bar,baz") - .with_stderr( - "\ -[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) -[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn many_cli_features_comma_and_space_delimited() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - optional = true - - [dependencies.baz] - path = "baz" - optional = true - - [dependencies.bam] - path = "bam" - optional = true - - [dependencies.bap] - path = "bap" - optional = true - "#, - ) - .file( - "src/main.rs", - r#" - #[allow(unused_extern_crates)] - extern crate bar; - #[allow(unused_extern_crates)] - extern crate baz; - #[allow(unused_extern_crates)] - extern crate bam; - #[allow(unused_extern_crates)] - extern crate bap; - fn main() {} - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .file("bam/Cargo.toml", &basic_manifest("bam", "0.0.1")) - .file("bam/src/lib.rs", "pub fn bam() {}") - .file("bap/Cargo.toml", &basic_manifest("bap", "0.0.1")) - .file("bap/src/lib.rs", "pub fn bap() {}") - .build(); - - p.cargo("build --features") - .arg("bar,baz bam bap") - .with_stderr( - "\ -[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) -[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) -[COMPILING] ba[..] v0.0.1 ([CWD]/ba[..]) -[COMPILING] ba[..] 
v0.0.1 ([CWD]/ba[..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn only_dep_is_optional() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - foo = ['bar'] - - [dependencies] - bar = { version = "0.1", optional = true } - - [dev-dependencies] - bar = "0.1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn all_features_all_crates() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [workspace] - members = ['bar'] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.0.1" - authors = [] - - [features] - foo = [] - "#, - ) - .file("bar/src/main.rs", "#[cfg(feature = \"foo\")] fn main() {}") - .build(); - - p.cargo("build --all-features --workspace").run(); -} - -#[cargo_test] -fn feature_off_dylib() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - - [package] - name = "foo" - version = "0.0.1" - - [lib] - crate-type = ["dylib"] - - [features] - f1 = [] - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn hello() -> &'static str { - if cfg!(feature = "f1") { - "f1" - } else { - "no f1" - } - } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - - [dependencies] - foo = { path = ".." } - "#, - ) - .file( - "bar/src/main.rs", - r#" - extern crate foo; - - fn main() { - assert_eq!(foo::hello(), "no f1"); - } - "#, - ) - .build(); - - // Build the dylib with `f1` feature. - p.cargo("build --features f1").run(); - // Check that building without `f1` uses a dylib without `f1`. - p.cargo("run -p bar").run(); -} - -#[cargo_test] -fn warn_if_default_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - optional = true - - [features] - default-features = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build") - .with_stderr( - r#" -[WARNING] `default-features = [".."]` was found in [features]. Did you mean to use `default = [".."]`? -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
- "#.trim(), - ).run(); -} - -#[cargo_test] -fn no_feature_for_non_optional_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file( - "src/main.rs", - r#" - #[cfg(not(feature = "bar"))] - fn main() { - } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.0.1" - authors = [] - - [features] - a = [] - "#, - ) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build --features bar/a").run(); -} - -#[cargo_test] -fn features_option_given_twice() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - a = [] - b = [] - "#, - ) - .file( - "src/main.rs", - r#" - #[cfg(all(feature = "a", feature = "b"))] - fn main() {} - "#, - ) - .build(); - - p.cargo("build --features a --features b").run(); -} - -#[cargo_test] -fn multi_multi_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - a = [] - b = [] - c = [] - "#, - ) - .file( - "src/main.rs", - r#" - #[cfg(all(feature = "a", feature = "b", feature = "c"))] - fn main() {} - "#, - ) - .build(); - - p.cargo("build --features a --features").arg("b c").run(); -} - -#[cargo_test] -fn cli_parse_ok() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - a = [] - "#, - ) - .file( - "src/main.rs", - r#" - #[cfg(feature = "a")] - fn main() { - assert_eq!(std::env::args().nth(1).unwrap(), "b"); - } - "#, - ) - .build(); - - p.cargo("run --features a b").run(); -} - -#[cargo_test] -fn all_features_virtual_ws() { - // What happens with `--all-features` in the root of a virtual workspace. - // Some of this behavior is a little strange (member dependencies also - // have all features enabled, one might expect `f4` to be disabled). - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - edition = "2018" - - [dependencies] - b = {path="../b", optional=true} - - [features] - default = ["f1"] - f1 = [] - f2 = [] - "#, - ) - .file( - "a/src/main.rs", - r#" - fn main() { - if cfg!(feature="f1") { - println!("f1"); - } - if cfg!(feature="f2") { - println!("f2"); - } - #[cfg(feature="b")] - b::f(); - } - "#, - ) - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - - [features] - default = ["f3"] - f3 = [] - f4 = [] - "#, - ) - .file( - "b/src/lib.rs", - r#" - pub fn f() { - if cfg!(feature="f3") { - println!("f3"); - } - if cfg!(feature="f4") { - println!("f4"); - } - } - "#, - ) - .build(); - - p.cargo("run").with_stdout("f1\n").run(); - p.cargo("run --all-features") - .with_stdout("f1\nf2\nf3\nf4\n") - .run(); - // In `a`, it behaves differently. :( - p.cargo("run --all-features") - .cwd("a") - .with_stdout("f1\nf2\nf3\n") - .run(); -} - -#[cargo_test] -fn slash_optional_enables() { - // --features dep/feat will enable `dep` and set its feature. 
- let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = {path="dep", optional=true} - "#, - ) - .file( - "src/lib.rs", - r#" - #[cfg(not(feature="dep"))] - compile_error!("dep not set"); - "#, - ) - .file( - "dep/Cargo.toml", - r#" - [package] - name = "dep" - version = "0.1.0" - - [features] - feat = [] - "#, - ) - .file( - "dep/src/lib.rs", - r#" - #[cfg(not(feature="feat"))] - compile_error!("feat not set"); - "#, - ) - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr_contains("[..]dep not set[..]") - .run(); - - p.cargo("check --features dep/feat").run(); -} - -#[cargo_test] -fn registry_summary_order_doesnt_matter() { - // Checks for an issue where the resolver depended on the order of entries - // in the registry summary. If there was a non-optional dev-dependency - // that appeared before an optional normal dependency, then the resolver - // would not activate the optional dependency with a pkg/featname feature - // syntax. - Package::new("dep", "0.1.0") - .feature("feat1", &[]) - .file( - "src/lib.rs", - r#" - #[cfg(feature="feat1")] - pub fn work() { - println!("it works"); - } - "#, - ) - .publish(); - Package::new("bar", "0.1.0") - .feature("bar_feat", &["dep/feat1"]) - .add_dep(Dependency::new("dep", "0.1.0").dev()) - .add_dep(Dependency::new("dep", "0.1.0").optional(true)) - .file( - "src/lib.rs", - r#" - // This will fail to compile without `dep` optional dep activated. - extern crate dep; - - pub fn doit() { - dep::work(); - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - bar = { version="0.1", features = ["bar_feat"] } - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - bar::doit(); - } - "#, - ) - .build(); - - p.cargo("run") - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] [..] -[DOWNLOADED] [..] -[COMPILING] dep v0.1.0 -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] 
-[RUNNING] `target/debug/foo[EXE]` -", - ) - .with_stdout("it works") - .run(); -} - -#[cargo_test] -fn nonexistent_required_features() { - Package::new("required_dependency", "0.1.0") - .feature("simple", &[]) - .publish(); - Package::new("optional_dependency", "0.2.0") - .feature("optional", &[]) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - [features] - existing = [] - fancy = ["optional_dependency"] - [dependencies] - required_dependency = { version = "0.1", optional = false} - optional_dependency = { version = "0.2", optional = true} - [[example]] - name = "ololo" - required-features = ["not_present", - "existing", - "fancy", - "required_dependency/not_existing", - "required_dependency/simple", - "optional_dependency/optional", - "not_specified_dependency/some_feature"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("examples/ololo.rs", "fn main() {}") - .build(); - - p.cargo("build --examples") - .with_stderr_contains( - "\ -[WARNING] invalid feature `not_present` in required-features of target `ololo`: \ - `not_present` is not present in [features] section -[WARNING] invalid feature `required_dependency/not_existing` in required-features \ - of target `ololo`: feature `not_existing` does not exist in package \ - `required_dependency v0.1.0` -[WARNING] invalid feature `not_specified_dependency/some_feature` in required-features \ - of target `ololo`: dependency `not_specified_dependency` does not exist -", - ) - .run(); -} - -#[cargo_test] -fn invalid_feature_names_warning() { - // Warnings for more restricted feature syntax. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [features] - # Some valid, but unusual names, shouldn't warn. - "c++17" = [] - "128bit" = [] - "_foo" = [] - "feat-name" = [] - "feat_name" = [] - "foo.bar" = [] - - # Invalid names. - "+foo" = [] - "-foo" = [] - ".foo" = [] - "foo:bar" = [] - "foo?" = [] - "?foo" = [] - "โ’ถโ’ทโ’ธ" = [] - "aยผ" = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Unfortunately the warnings are duplicated due to the Summary being - // loaded twice (once in the Workspace, and once in PackageRegistry) and - // Cargo does not have a de-duplication system. This should probably be - // OK, since I'm not expecting this to affect anyone. - p.cargo("check") - .with_stderr("\ -[WARNING] invalid character `+` in feature `+foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `-` in feature `-foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `.` in feature `.foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) -This was previously accepted but is being phased out; it will become a hard error in a future release. 
-For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `?` in feature `?foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `ยผ` in feature `aยผ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `:` in feature `foo:bar` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `?` in feature `foo?` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `โ’ถ` in feature `โ’ถโ’ทโ’ธ` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `โ’ท` in feature `โ’ถโ’ทโ’ธ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `โ’ธ` in feature `โ’ถโ’ทโ’ธ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `+` in feature `+foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. 
-[WARNING] invalid character `-` in feature `-foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `.` in feature `.foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `?` in feature `?foo` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `ยผ` in feature `aยผ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `:` in feature `foo:bar` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `?` in feature `foo?` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `โ’ถ` in feature `โ’ถโ’ทโ’ธ` in package foo v0.1.0 ([ROOT]/foo), the first character must be a Unicode XID start character or digit (most letters or `_` or `0` to `9`) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[WARNING] invalid character `โ’ท` in feature `โ’ถโ’ทโ’ธ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. 
-[WARNING] invalid character `โ’ธ` in feature `โ’ถโ’ทโ’ธ` in package foo v0.1.0 ([ROOT]/foo), characters must be Unicode XID characters, `+`, or `.` (numbers, `+`, `-`, `_`, `.`, or most letters) -This was previously accepted but is being phased out; it will become a hard error in a future release. -For more information, see issue #8813 , and please leave a comment if this will be a problem for your project. -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -") - .run(); -} - -#[cargo_test] -fn invalid_feature_names_error() { - // Errors for more restricted feature syntax. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [features] - "foo/bar" = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[CWD]/Cargo.toml` - -Caused by: - feature named `foo/bar` is not allowed to contain slashes -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/features2.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/features2.rs deleted file mode 100644 index f1f1222a0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/features2.rs +++ /dev/null @@ -1,2400 +0,0 @@ -//! Tests for the new feature resolver. - -use cargo_test_support::cross_compile::{self, alternate}; -use cargo_test_support::install::cargo_home; -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::publish::validate_crate_contents; -use cargo_test_support::registry::{Dependency, Package}; -use cargo_test_support::{basic_manifest, cargo_process, project, rustc_host, Project}; -use std::fs::File; - -/// Switches Cargo.toml to use `resolver = "2"`. -pub fn switch_to_resolver_2(p: &Project) { - let mut manifest = p.read_file("Cargo.toml"); - if manifest.contains("resolver =") { - panic!("did not expect manifest to already contain a resolver setting"); - } - if let Some(index) = manifest.find("[workspace]\n") { - manifest.insert_str(index + 12, "resolver = \"2\"\n"); - } else if let Some(index) = manifest.find("[package]\n") { - manifest.insert_str(index + 10, "resolver = \"2\"\n"); - } else { - panic!("expected [package] or [workspace] in manifest"); - } - p.change_file("Cargo.toml", &manifest); -} - -#[cargo_test] -fn inactivate_targets() { - // Basic test of `itarget`. A shared dependency where an inactive [target] - // changes the features. - Package::new("common", "1.0.0") - .feature("f1", &[]) - .file( - "src/lib.rs", - r#" - #[cfg(feature = "f1")] - compile_error!("f1 should not activate"); - "#, - ) - .publish(); - - Package::new("bar", "1.0.0") - .add_dep( - Dependency::new("common", "1.0") - .target("cfg(whatever)") - .enable_features(&["f1"]), - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - common = "1.0" - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr_contains("[..]f1 should not activate[..]") - .run(); - - switch_to_resolver_2(&p); - p.cargo("check").run(); -} - -#[cargo_test] -fn inactive_target_optional() { - // Activating optional [target] dependencies for inactivate target. 
- Package::new("common", "1.0.0") - .feature("f1", &[]) - .feature("f2", &[]) - .feature("f3", &[]) - .feature("f4", &[]) - .file( - "src/lib.rs", - r#" - pub fn f() { - if cfg!(feature="f1") { println!("f1"); } - if cfg!(feature="f2") { println!("f2"); } - if cfg!(feature="f3") { println!("f3"); } - if cfg!(feature="f4") { println!("f4"); } - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - common = "1.0" - - [target.'cfg(whatever)'.dependencies] - dep1 = {path='dep1', optional=true} - dep2 = {path='dep2', optional=true, features=["f3"]} - common = {version="1.0", optional=true, features=["f4"]} - - [features] - foo1 = ["dep1/f2"] - foo2 = ["dep2"] - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - if cfg!(feature="foo1") { println!("foo1"); } - if cfg!(feature="foo2") { println!("foo2"); } - if cfg!(feature="dep1") { println!("dep1"); } - if cfg!(feature="dep2") { println!("dep2"); } - if cfg!(feature="common") { println!("common"); } - common::f(); - } - "#, - ) - .file( - "dep1/Cargo.toml", - r#" - [package] - name = "dep1" - version = "0.1.0" - - [dependencies] - common = {version="1.0", features=["f1"]} - - [features] - f2 = ["common/f2"] - "#, - ) - .file( - "dep1/src/lib.rs", - r#"compile_error!("dep1 should not build");"#, - ) - .file( - "dep2/Cargo.toml", - r#" - [package] - name = "dep2" - version = "0.1.0" - - [dependencies] - common = "1.0" - - [features] - f3 = ["common/f3"] - "#, - ) - .file( - "dep2/src/lib.rs", - r#"compile_error!("dep2 should not build");"#, - ) - .build(); - - p.cargo("run --all-features") - .with_stdout("foo1\nfoo2\ndep1\ndep2\ncommon\nf1\nf2\nf3\nf4\n") - .run(); - p.cargo("run --features dep1") - .with_stdout("dep1\nf1\n") - .run(); - p.cargo("run --features foo1") - .with_stdout("foo1\ndep1\nf1\nf2\n") - .run(); - p.cargo("run --features dep2") - .with_stdout("dep2\nf3\n") - .run(); - p.cargo("run --features common") - .with_stdout("common\nf4\n") - .run(); - - switch_to_resolver_2(&p); - p.cargo("run --all-features") - .with_stdout("foo1\nfoo2\ndep1\ndep2\ncommon") - .run(); - p.cargo("run --features dep1").with_stdout("dep1\n").run(); - p.cargo("run --features foo1").with_stdout("foo1\n").run(); - p.cargo("run --features dep2").with_stdout("dep2\n").run(); - p.cargo("run --features common").with_stdout("common").run(); -} - -#[cargo_test] -fn itarget_proc_macro() { - // itarget inside a proc-macro while cross-compiling - if cross_compile::disabled() { - return; - } - Package::new("hostdep", "1.0.0").publish(); - Package::new("pm", "1.0.0") - .proc_macro(true) - .target_dep("hostdep", "1.0", rustc_host()) - .file("src/lib.rs", "extern crate hostdep;") - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - pm = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Old behavior - p.cargo("check").run(); - p.cargo("check --target").arg(alternate()).run(); - - // New behavior - switch_to_resolver_2(&p); - p.cargo("check").run(); - p.cargo("check --target").arg(alternate()).run(); - // For good measure, just make sure things don't break. - p.cargo("check --target").arg(alternate()).run(); -} - -#[cargo_test] -fn decouple_host_deps() { - // Basic test for `host_dep` decouple. 
- Package::new("common", "1.0.0") - .feature("f1", &[]) - .file( - "src/lib.rs", - r#" - #[cfg(feature = "f1")] - pub fn foo() {} - #[cfg(not(feature = "f1"))] - pub fn bar() {} - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [build-dependencies] - common = {version="1.0", features=["f1"]} - - [dependencies] - common = "1.0" - "#, - ) - .file( - "build.rs", - r#" - use common::foo; - fn main() {} - "#, - ) - .file("src/lib.rs", "use common::bar;") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr_contains("[..]unresolved import `common::bar`[..]") - .run(); - - switch_to_resolver_2(&p); - p.cargo("check").run(); -} - -#[cargo_test] -fn decouple_host_deps_nested() { - // `host_dep` decouple of transitive dependencies. - Package::new("common", "1.0.0") - .feature("f1", &[]) - .file( - "src/lib.rs", - r#" - #[cfg(feature = "f1")] - pub fn foo() {} - #[cfg(not(feature = "f1"))] - pub fn bar() {} - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [build-dependencies] - bdep = {path="bdep"} - - [dependencies] - common = "1.0" - "#, - ) - .file( - "build.rs", - r#" - use bdep::foo; - fn main() {} - "#, - ) - .file("src/lib.rs", "use common::bar;") - .file( - "bdep/Cargo.toml", - r#" - [package] - name = "bdep" - version = "0.1.0" - edition = "2018" - - [dependencies] - common = {version="1.0", features=["f1"]} - "#, - ) - .file("bdep/src/lib.rs", "pub use common::foo;") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr_contains("[..]unresolved import `common::bar`[..]") - .run(); - - switch_to_resolver_2(&p); - p.cargo("check").run(); -} - -#[cargo_test] -fn decouple_dev_deps() { - // Basic test for `dev_dep` decouple. - Package::new("common", "1.0.0") - .feature("f1", &[]) - .feature("f2", &[]) - .file( - "src/lib.rs", - r#" - // const ensures it uses the correct dependency at *build time* - // compared to *link time*. 
- #[cfg(all(feature="f1", not(feature="f2")))] - pub const X: u32 = 1; - - #[cfg(all(feature="f1", feature="f2"))] - pub const X: u32 = 3; - - pub fn foo() -> u32 { - let mut res = 0; - if cfg!(feature = "f1") { - res |= 1; - } - if cfg!(feature = "f2") { - res |= 2; - } - res - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - common = {version="1.0", features=["f1"]} - - [dev-dependencies] - common = {version="1.0", features=["f2"]} - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - let expected: u32 = std::env::args().skip(1).next().unwrap().parse().unwrap(); - assert_eq!(foo::foo(), expected); - assert_eq!(foo::build_time(), expected); - assert_eq!(common::foo(), expected); - assert_eq!(common::X, expected); - } - - #[test] - fn test_bin() { - assert_eq!(foo::foo(), 3); - assert_eq!(common::foo(), 3); - assert_eq!(common::X, 3); - assert_eq!(foo::build_time(), 3); - } - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn foo() -> u32 { - common::foo() - } - - pub fn build_time() -> u32 { - common::X - } - - #[test] - fn test_lib() { - assert_eq!(foo(), 3); - assert_eq!(common::foo(), 3); - assert_eq!(common::X, 3); - } - "#, - ) - .file( - "tests/t1.rs", - r#" - #[test] - fn test_t1() { - assert_eq!(foo::foo(), 3); - assert_eq!(common::foo(), 3); - assert_eq!(common::X, 3); - assert_eq!(foo::build_time(), 3); - } - - #[test] - fn test_main() { - // Features are unified for main when run with `cargo test`, - // even with the new resolver. - let s = std::process::Command::new("target/debug/foo") - .arg("3") - .status().unwrap(); - assert!(s.success()); - } - "#, - ) - .build(); - - // Old behavior - p.cargo("run 3").run(); - p.cargo("test").run(); - - // New behavior - switch_to_resolver_2(&p); - p.cargo("run 1").run(); - p.cargo("test").run(); -} - -#[cargo_test] -fn build_script_runtime_features() { - // Check that the CARGO_FEATURE_* environment variable is set correctly. - // - // This has a common dependency between build/normal/dev-deps, and it - // queries which features it was built with in different circumstances. 
- Package::new("common", "1.0.0") - .feature("normal", &[]) - .feature("dev", &[]) - .feature("build", &[]) - .file( - "build.rs", - r#" - fn is_set(name: &str) -> bool { - std::env::var(name) == Ok("1".to_string()) - } - - fn main() { - let mut res = 0; - if is_set("CARGO_FEATURE_NORMAL") { - res |= 1; - } - if is_set("CARGO_FEATURE_DEV") { - res |= 2; - } - if is_set("CARGO_FEATURE_BUILD") { - res |= 4; - } - println!("cargo:rustc-cfg=RunCustomBuild=\"{}\"", res); - - let mut res = 0; - if cfg!(feature = "normal") { - res |= 1; - } - if cfg!(feature = "dev") { - res |= 2; - } - if cfg!(feature = "build") { - res |= 4; - } - println!("cargo:rustc-cfg=CustomBuild=\"{}\"", res); - } - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn foo() -> u32 { - let mut res = 0; - if cfg!(feature = "normal") { - res |= 1; - } - if cfg!(feature = "dev") { - res |= 2; - } - if cfg!(feature = "build") { - res |= 4; - } - res - } - - pub fn build_time() -> u32 { - #[cfg(RunCustomBuild="1")] return 1; - #[cfg(RunCustomBuild="3")] return 3; - #[cfg(RunCustomBuild="4")] return 4; - #[cfg(RunCustomBuild="5")] return 5; - #[cfg(RunCustomBuild="7")] return 7; - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [build-dependencies] - common = {version="1.0", features=["build"]} - - [dependencies] - common = {version="1.0", features=["normal"]} - - [dev-dependencies] - common = {version="1.0", features=["dev"]} - "#, - ) - .file( - "build.rs", - r#" - fn main() { - assert_eq!(common::foo(), common::build_time()); - println!("cargo:rustc-cfg=from_build=\"{}\"", common::foo()); - } - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn foo() -> u32 { - common::foo() - } - - pub fn build_time() -> u32 { - common::build_time() - } - - #[test] - fn test_lib() { - assert_eq!(common::foo(), common::build_time()); - assert_eq!(common::foo(), - std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap()); - } - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - assert_eq!(common::foo(), common::build_time()); - assert_eq!(common::foo(), - std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap()); - } - - #[test] - fn test_bin() { - assert_eq!(common::foo(), common::build_time()); - assert_eq!(common::foo(), - std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap()); - } - "#, - ) - .file( - "tests/t1.rs", - r#" - #[test] - fn test_t1() { - assert_eq!(common::foo(), common::build_time()); - assert_eq!(common::foo(), - std::env::var("CARGO_FEATURE_EXPECT").unwrap().parse().unwrap()); - } - - #[test] - fn test_main() { - // Features are unified for main when run with `cargo test`, - // even with the new resolver. - let s = std::process::Command::new("target/debug/foo") - .status().unwrap(); - assert!(s.success()); - } - "#, - ) - .build(); - - // Old way, unifies all 3. - p.cargo("run").env("CARGO_FEATURE_EXPECT", "7").run(); - p.cargo("test").env("CARGO_FEATURE_EXPECT", "7").run(); - - // New behavior. - switch_to_resolver_2(&p); - - // normal + build unify - p.cargo("run").env("CARGO_FEATURE_EXPECT", "1").run(); - - // dev_deps are still unified with `cargo test` - p.cargo("test").env("CARGO_FEATURE_EXPECT", "3").run(); -} - -#[cargo_test] -fn cyclical_dev_dep() { - // Check how a cyclical dev-dependency will work. 
- let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [features] - dev = [] - - [dev-dependencies] - foo = { path = '.', features = ["dev"] } - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn assert_dev(enabled: bool) { - assert_eq!(enabled, cfg!(feature="dev")); - } - - #[test] - fn test_in_lib() { - assert_dev(true); - } - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - let expected: bool = std::env::args().skip(1).next().unwrap().parse().unwrap(); - foo::assert_dev(expected); - } - "#, - ) - .file( - "tests/t1.rs", - r#" - #[test] - fn integration_links() { - foo::assert_dev(true); - // The lib linked with main.rs will also be unified. - let s = std::process::Command::new("target/debug/foo") - .arg("true") - .status().unwrap(); - assert!(s.success()); - } - "#, - ) - .build(); - - // Old way unifies features. - p.cargo("run true").run(); - // dev feature should always be enabled in tests. - p.cargo("test").run(); - - // New behavior. - switch_to_resolver_2(&p); - // Should decouple main. - p.cargo("run false").run(); - - // And this should be no different. - p.cargo("test").run(); -} - -#[cargo_test] -fn all_feature_opts() { - // All feature options at once. - Package::new("common", "1.0.0") - .feature("normal", &[]) - .feature("build", &[]) - .feature("dev", &[]) - .feature("itarget", &[]) - .file( - "src/lib.rs", - r#" - pub fn feats() -> u32 { - let mut res = 0; - if cfg!(feature="normal") { res |= 1; } - if cfg!(feature="build") { res |= 2; } - if cfg!(feature="dev") { res |= 4; } - if cfg!(feature="itarget") { res |= 8; } - res - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - common = {version = "1.0", features=["normal"]} - - [dev-dependencies] - common = {version = "1.0", features=["dev"]} - - [build-dependencies] - common = {version = "1.0", features=["build"]} - - [target.'cfg(whatever)'.dependencies] - common = {version = "1.0", features=["itarget"]} - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - expect(); - } - - fn expect() { - let expected: u32 = std::env::var("EXPECTED_FEATS").unwrap().parse().unwrap(); - assert_eq!(expected, common::feats()); - } - - #[test] - fn from_test() { - expect(); - } - "#, - ) - .build(); - - p.cargo("run").env("EXPECTED_FEATS", "15").run(); - p.cargo("test").env("EXPECTED_FEATS", "15").run(); - - // New behavior. - switch_to_resolver_2(&p); - // Only normal feature. - p.cargo("run").env("EXPECTED_FEATS", "1").run(); - - // only normal+dev - p.cargo("test").env("EXPECTED_FEATS", "5").run(); -} - -#[cargo_test] -fn required_features_host_dep() { - // Check that required-features handles build-dependencies correctly. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [[bin]] - name = "x" - required-features = ["bdep/f1"] - - [build-dependencies] - bdep = {path="bdep"} - "#, - ) - .file("build.rs", "fn main() {}") - .file( - "src/bin/x.rs", - r#" - fn main() {} - "#, - ) - .file( - "bdep/Cargo.toml", - r#" - [package] - name = "bdep" - version = "0.1.0" - - [features] - f1 = [] - "#, - ) - .file("bdep/src/lib.rs", "") - .build(); - - p.cargo("run") - .with_status(101) - .with_stderr( - "\ -[ERROR] target `x` in package `foo` requires the features: `bdep/f1` -Consider enabling them by passing, e.g., `--features=\"bdep/f1\"` -", - ) - .run(); - - // New behavior. 
- switch_to_resolver_2(&p); - p.cargo("run --features bdep/f1").run(); -} - -#[cargo_test] -fn disabled_shared_host_dep() { - // Check for situation where an optional dep of a shared dep is enabled in - // a normal dependency, but disabled in an optional one. The unit tree is: - // foo - // โ”œโ”€โ”€ foo build.rs - // | โ””โ”€โ”€ common (BUILD dependency, NO FEATURES) - // โ””โ”€โ”€ common (Normal dependency, default features) - // โ””โ”€โ”€ somedep - Package::new("somedep", "1.0.0") - .file( - "src/lib.rs", - r#" - pub fn f() { println!("hello from somedep"); } - "#, - ) - .publish(); - Package::new("common", "1.0.0") - .feature("default", &["somedep"]) - .add_dep(Dependency::new("somedep", "1.0").optional(true)) - .file( - "src/lib.rs", - r#" - pub fn check_somedep() -> bool { - #[cfg(feature="somedep")] - { - extern crate somedep; - somedep::f(); - true - } - #[cfg(not(feature="somedep"))] - { - println!("no somedep"); - false - } - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - edition = "2018" - resolver = "2" - - [dependencies] - common = "1.0" - - [build-dependencies] - common = {version = "1.0", default-features = false} - "#, - ) - .file( - "src/main.rs", - "fn main() { assert!(common::check_somedep()); }", - ) - .file( - "build.rs", - "fn main() { assert!(!common::check_somedep()); }", - ) - .build(); - - p.cargo("run -v").with_stdout("hello from somedep").run(); -} - -#[cargo_test] -fn required_features_inactive_dep() { - // required-features with an inactivated dep. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - resolver = "2" - - [target.'cfg(whatever)'.dependencies] - bar = {path="bar"} - - [[bin]] - name = "foo" - required-features = ["feat1"] - - [features] - feat1 = [] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("check").with_stderr("[FINISHED] [..]").run(); - - p.cargo("check --features=feat1") - .with_stderr("[CHECKING] foo[..]\n[FINISHED] [..]") - .run(); -} - -#[cargo_test] -fn decouple_proc_macro() { - // proc macro features are not shared - Package::new("common", "1.0.0") - .feature("somefeat", &[]) - .file( - "src/lib.rs", - r#" - pub const fn foo() -> bool { cfg!(feature="somefeat") } - #[cfg(feature="somefeat")] - pub const FEAT_ONLY_CONST: bool = true; - "#, - ) - .publish(); - Package::new("pm", "1.0.0") - .proc_macro(true) - .feature_dep("common", "1.0", &["somefeat"]) - .file( - "src/lib.rs", - r#" - extern crate proc_macro; - extern crate common; - #[proc_macro] - pub fn foo(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - assert!(common::foo()); - "".parse().unwrap() - } - "#, - ) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - edition = "2018" - - [dependencies] - pm = "1.0" - common = "1.0" - "#, - ) - .file( - "src/lib.rs", - r#" - //! Test with docs. - //! - //! ```rust - //! pm::foo!{} - //! fn main() { - //! let expected = std::env::var_os("TEST_EXPECTS_ENABLED").is_some(); - //! assert_eq!(expected, common::foo(), "common is wrong"); - //! } - //! ``` - "#, - ) - .file( - "src/main.rs", - r#" - pm::foo!{} - fn main() { - println!("it is {}", common::foo()); - } - "#, - ) - .build(); - - p.cargo("run") - .env("TEST_EXPECTS_ENABLED", "1") - .with_stdout("it is true") - .run(); - // Make sure the test is fallible. 
- p.cargo("test --doc") - .with_status(101) - .with_stdout_contains("[..]common is wrong[..]") - .run(); - p.cargo("test --doc").env("TEST_EXPECTS_ENABLED", "1").run(); - p.cargo("doc").run(); - assert!(p - .build_dir() - .join("doc/common/constant.FEAT_ONLY_CONST.html") - .exists()); - // cargo doc should clean in-between runs, but it doesn't, and leaves stale files. - // https://github.com/rust-lang/cargo/issues/6783 (same for removed items) - p.build_dir().join("doc").rm_rf(); - - // New behavior. - switch_to_resolver_2(&p); - p.cargo("run").with_stdout("it is false").run(); - - p.cargo("test --doc").run(); - p.cargo("doc").run(); - assert!(!p - .build_dir() - .join("doc/common/constant.FEAT_ONLY_CONST.html") - .exists()); -} - -#[cargo_test] -fn proc_macro_ws() { - // Checks for bug with proc-macro in a workspace with dependency (shouldn't panic). - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "pm"] - resolver = "2" - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [features] - feat1 = [] - "#, - ) - .file("foo/src/lib.rs", "") - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - - [lib] - proc-macro = true - - [dependencies] - foo = { path = "../foo", features=["feat1"] } - "#, - ) - .file("pm/src/lib.rs", "") - .build(); - - p.cargo("check -p pm -v") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]--cfg[..]feat1[..]") - .run(); - // This may be surprising that `foo` doesn't get built separately. It is - // because pm might have other units (binaries, tests, etc.), and so the - // feature resolver must assume that normal deps get unified with it. This - // is related to the bigger issue where the features selected in a - // workspace depend on which packages are selected. - p.cargo("check --workspace -v") - .with_stderr( - "\ -[FRESH] foo v0.1.0 [..] -[FRESH] pm v0.1.0 [..] -[FINISHED] dev [..] -", - ) - .run(); - // Selecting just foo will build without unification. - p.cargo("check -p foo -v") - // Make sure `foo` is built without feat1 - .with_stderr_line_without(&["[RUNNING] `rustc --crate-name foo"], &["--cfg[..]feat1"]) - .run(); -} - -#[cargo_test] -fn has_dev_dep_for_test() { - // Check for a bug where the decision on whether or not "dev dependencies" - // should be used did not consider `check --profile=test`. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dev-dependencies] - dep = { path = 'dep', features = ['f1'] } - "#, - ) - .file( - "src/lib.rs", - r#" - #[test] - fn t1() { - dep::f(); - } - "#, - ) - .file( - "dep/Cargo.toml", - r#" - [package] - name = "dep" - version = "0.1.0" - - [features] - f1 = [] - "#, - ) - .file( - "dep/src/lib.rs", - r#" - #[cfg(feature = "f1")] - pub fn f() {} - "#, - ) - .build(); - - p.cargo("check -v") - .with_stderr( - "\ -[CHECKING] foo v0.1.0 [..] -[RUNNING] `rustc --crate-name foo [..] -[FINISHED] [..] -", - ) - .run(); - p.cargo("check -v --profile=test") - .with_stderr( - "\ -[CHECKING] dep v0.1.0 [..] -[RUNNING] `rustc --crate-name dep [..] -[CHECKING] foo v0.1.0 [..] -[RUNNING] `rustc --crate-name foo [..] -[FINISHED] [..] -", - ) - .run(); - - // New resolver should not be any different. - switch_to_resolver_2(&p); - p.cargo("check -v --profile=test") - .with_stderr( - "\ -[FRESH] dep [..] -[FRESH] foo [..] -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn build_dep_activated() { - // Build dependencies always match the host for [target.*.build-dependencies]. - if cross_compile::disabled() { - return; - } - Package::new("somedep", "1.0.0") - .file("src/lib.rs", "") - .publish(); - Package::new("targetdep", "1.0.0").publish(); - Package::new("hostdep", "1.0.0") - // Check that "for_host" is sticky. - .target_dep("somedep", "1.0", rustc_host()) - .feature("feat1", &[]) - .file( - "src/lib.rs", - r#" - extern crate somedep; - - #[cfg(not(feature="feat1"))] - compile_error!{"feat1 missing"} - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - # This should never be selected. - [target.'{}'.build-dependencies] - targetdep = "1.0" - - [target.'{}'.build-dependencies] - hostdep = {{version="1.0", features=["feat1"]}} - "#, - alternate(), - rustc_host() - ), - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - - p.cargo("check").run(); - p.cargo("check --target").arg(alternate()).run(); - - // New behavior. - switch_to_resolver_2(&p); - p.cargo("check").run(); - p.cargo("check --target").arg(alternate()).run(); -} - -#[cargo_test] -fn resolver_bad_setting() { - // Unknown setting in `resolver` - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - resolver = "foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]/foo/Cargo.toml` - -Caused by: - `resolver` setting `foo` is not valid, valid options are \"1\" or \"2\" -", - ) - .run(); -} - -#[cargo_test] -fn resolver_original() { - // resolver="1" uses old unification behavior. - Package::new("common", "1.0.0") - .feature("f1", &[]) - .file( - "src/lib.rs", - r#" - #[cfg(feature = "f1")] - compile_error!("f1 should not activate"); - "#, - ) - .publish(); - - Package::new("bar", "1.0.0") - .add_dep( - Dependency::new("common", "1.0") - .target("cfg(whatever)") - .enable_features(&["f1"]), - ) - .publish(); - - let manifest = |resolver| { - format!( - r#" - [package] - name = "foo" - version = "0.1.0" - resolver = "{}" - - [dependencies] - common = "1.0" - bar = "1.0" - "#, - resolver - ) - }; - - let p = project() - .file("Cargo.toml", &manifest("1")) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr_contains("[..]f1 should not activate[..]") - .run(); - - p.change_file("Cargo.toml", &manifest("2")); - - p.cargo("check").run(); -} - -#[cargo_test] -fn resolver_not_both() { - // Can't specify resolver in both workspace and package. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - resolver = "2" - [package] - name = "foo" - version = "0.1.0" - resolver = "2" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]/foo/Cargo.toml` - -Caused by: - cannot specify `resolver` field in both `[workspace]` and `[package]` -", - ) - .run(); -} - -#[cargo_test] -fn resolver_ws_member() { - // Can't specify `resolver` in a ws member. 
- let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - resolver = "2" - "#, - ) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("check") - .with_stderr( - "\ -warning: resolver for the non root package will be ignored, specify resolver at the workspace root: -package: [..]/foo/a/Cargo.toml -workspace: [..]/foo/Cargo.toml -[CHECKING] a v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn resolver_ws_root_and_member() { - // Check when specified in both ws root and member. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a"] - resolver = "2" - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - resolver = "2" - "#, - ) - .file("a/src/lib.rs", "") - .build(); - - // Ignores if they are the same. - p.cargo("check") - .with_stderr( - "\ -[CHECKING] a v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn resolver_enables_new_features() { - // resolver="2" enables all the things. - Package::new("common", "1.0.0") - .feature("normal", &[]) - .feature("build", &[]) - .feature("dev", &[]) - .feature("itarget", &[]) - .file( - "src/lib.rs", - r#" - pub fn feats() -> u32 { - let mut res = 0; - if cfg!(feature="normal") { res |= 1; } - if cfg!(feature="build") { res |= 2; } - if cfg!(feature="dev") { res |= 4; } - if cfg!(feature="itarget") { res |= 8; } - res - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - resolver = "2" - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - edition = "2018" - - [dependencies] - common = {version = "1.0", features=["normal"]} - - [dev-dependencies] - common = {version = "1.0", features=["dev"]} - - [build-dependencies] - common = {version = "1.0", features=["build"]} - - [target.'cfg(whatever)'.dependencies] - common = {version = "1.0", features=["itarget"]} - "#, - ) - .file( - "a/src/main.rs", - r#" - fn main() { - expect(); - } - - fn expect() { - let expected: u32 = std::env::var("EXPECTED_FEATS").unwrap().parse().unwrap(); - assert_eq!(expected, common::feats()); - } - - #[test] - fn from_test() { - expect(); - } - "#, - ) - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - - [features] - ping = [] - "#, - ) - .file( - "b/src/main.rs", - r#" - fn main() { - if cfg!(feature="ping") { - println!("pong"); - } - } - "#, - ) - .build(); - - // Only normal. - p.cargo("run --bin a") - .env("EXPECTED_FEATS", "1") - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] common [..] -[COMPILING] common v1.0.0 -[COMPILING] a v0.1.0 [..] -[FINISHED] [..] -[RUNNING] `target/debug/a[EXE]` -", - ) - .run(); - - // only normal+dev - p.cargo("test").cwd("a").env("EXPECTED_FEATS", "5").run(); - - // Can specify features of packages from a different directory. - p.cargo("run -p b --features=ping") - .cwd("a") - .with_stdout("pong") - .run(); -} - -#[cargo_test] -fn install_resolve_behavior() { - // install honors the resolver behavior. 
- Package::new("common", "1.0.0") - .feature("f1", &[]) - .file( - "src/lib.rs", - r#" - #[cfg(feature = "f1")] - compile_error!("f1 should not activate"); - "#, - ) - .publish(); - - Package::new("bar", "1.0.0").dep("common", "1.0").publish(); - - Package::new("foo", "1.0.0") - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - resolver = "2" - - [target.'cfg(whatever)'.dependencies] - common = {version="1.0", features=["f1"]} - - [dependencies] - bar = "1.0" - - "#, - ) - .file("src/main.rs", "fn main() {}") - .publish(); - - cargo_process("install foo").run(); -} - -#[cargo_test] -fn package_includes_resolve_behavior() { - // `cargo package` will inherit the correct resolve behavior. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a"] - resolver = "2" - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - authors = ["Zzz"] - description = "foo" - license = "MIT" - homepage = "https://example.com/" - "#, - ) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("package").cwd("a").run(); - - let rewritten_toml = format!( - r#"{} -[package] -name = "a" -version = "0.1.0" -authors = ["Zzz"] -description = "foo" -homepage = "https://example.com/" -license = "MIT" -resolver = "2" -"#, - cargo::core::package::MANIFEST_PREAMBLE - ); - - let f = File::open(&p.root().join("target/package/a-0.1.0.crate")).unwrap(); - validate_crate_contents( - f, - "a-0.1.0.crate", - &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], - &[("Cargo.toml", &rewritten_toml)], - ); -} - -#[cargo_test] -fn tree_all() { - // `cargo tree` with the new feature resolver. - Package::new("log", "0.4.8").feature("serde", &[]).publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - resolver = "2" - - [target.'cfg(whatever)'.dependencies] - log = {version="*", features=["serde"]} - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("tree --target=all") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ log v0.4.8 -", - ) - .run(); -} - -#[cargo_test] -fn shared_dep_same_but_dependencies() { - // Checks for a bug of nondeterminism. This scenario creates a shared - // dependency `dep` which needs to be built twice (once as normal, and - // once as a build dep). However, in both cases the flags to `dep` are the - // same, the only difference is what it links to. The normal dependency - // should link to `subdep` with the feature disabled, and the build - // dependency should link to it with it enabled. Crucially, the `--target` - // flag should not be specified, otherwise Unit.kind would be different - // and avoid the collision, and this bug won't manifest. 
- let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bin1", "bin2"] - resolver = "2" - "#, - ) - .file( - "bin1/Cargo.toml", - r#" - [package] - name = "bin1" - version = "0.1.0" - - [dependencies] - dep = { path = "../dep" } - "#, - ) - .file("bin1/src/main.rs", "fn main() { dep::feat_func(); }") - .file( - "bin2/Cargo.toml", - r#" - [package] - name = "bin2" - version = "0.1.0" - - [build-dependencies] - dep = { path = "../dep" } - subdep = { path = "../subdep", features = ["feat"] } - "#, - ) - .file("bin2/build.rs", "fn main() { dep::feat_func(); }") - .file("bin2/src/main.rs", "fn main() {}") - .file( - "dep/Cargo.toml", - r#" - [package] - name = "dep" - version = "0.1.0" - - [dependencies] - subdep = { path = "../subdep" } - "#, - ) - .file( - "dep/src/lib.rs", - "pub fn feat_func() { subdep::feat_func(); }", - ) - .file( - "subdep/Cargo.toml", - r#" - [package] - name = "subdep" - version = "0.1.0" - - [features] - feat = [] - "#, - ) - .file( - "subdep/src/lib.rs", - r#" - pub fn feat_func() { - #[cfg(feature = "feat")] println!("cargo:warning=feat: enabled"); - #[cfg(not(feature = "feat"))] println!("cargo:warning=feat: not enabled"); - } - "#, - ) - .build(); - - p.cargo("build --bin bin1 --bin bin2") - // unordered because bin1 and bin2 build at the same time - .with_stderr_unordered( - "\ -[COMPILING] subdep [..] -[COMPILING] dep [..] -[COMPILING] bin2 [..] -[COMPILING] bin1 [..] -warning: feat: enabled -[FINISHED] [..] -", - ) - .run(); - p.process(p.bin("bin1")) - .with_stdout("cargo:warning=feat: not enabled") - .run(); - - // Make sure everything stays cached. - p.cargo("build -v --bin bin1 --bin bin2") - .with_stderr_unordered( - "\ -[FRESH] subdep [..] -[FRESH] dep [..] -[FRESH] bin1 [..] -warning: feat: enabled -[FRESH] bin2 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn test_proc_macro() { - // Running `cargo test` on a proc-macro, with a shared dependency that has - // different features. - // - // There was a bug where `shared` was built twice (once with feature "B" - // and once without), and both copies linked into the unit test. This - // would cause a type failure when used in an intermediate dependency - // (the-macro-support). - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "runtime" - version = "0.1.0" - resolver = "2" - - [dependencies] - the-macro = { path = "the-macro", features = ['a'] } - [build-dependencies] - shared = { path = "shared", features = ['b'] } - "#, - ) - .file("src/lib.rs", "") - .file( - "the-macro/Cargo.toml", - r#" - [package] - name = "the-macro" - version = "0.1.0" - [lib] - proc-macro = true - test = false - [dependencies] - the-macro-support = { path = "../the-macro-support" } - shared = { path = "../shared" } - [dev-dependencies] - runtime = { path = ".." 
} - [features] - a = [] - "#, - ) - .file( - "the-macro/src/lib.rs", - " - fn _test() { - the_macro_support::foo(shared::Foo); - } - ", - ) - .file( - "the-macro-support/Cargo.toml", - r#" - [package] - name = "the-macro-support" - version = "0.1.0" - [dependencies] - shared = { path = "../shared" } - "#, - ) - .file( - "the-macro-support/src/lib.rs", - " - pub fn foo(_: shared::Foo) {} - ", - ) - .file( - "shared/Cargo.toml", - r#" - [package] - name = "shared" - version = "0.1.0" - [features] - b = [] - "#, - ) - .file("shared/src/lib.rs", "pub struct Foo;") - .build(); - p.cargo("test --manifest-path the-macro/Cargo.toml").run(); -} - -#[cargo_test] -fn doc_optional() { - // Checks for a bug where `cargo doc` was failing with an inactive target - // that enables a shared optional dependency. - Package::new("spin", "1.0.0").publish(); - Package::new("bar", "1.0.0") - .add_dep(Dependency::new("spin", "1.0").optional(true)) - .publish(); - // The enabler package enables the `spin` feature, which we don't want. - Package::new("enabler", "1.0.0") - .feature_dep("bar", "1.0", &["spin"]) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - resolver = "2" - - [target.'cfg(whatever)'.dependencies] - enabler = "1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("doc") - .with_stderr_unordered( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] spin v1.0.0 [..] -[DOWNLOADED] bar v1.0.0 [..] -[DOCUMENTING] bar v1.0.0 -[CHECKING] bar v1.0.0 -[DOCUMENTING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn minimal_download() { - // Various checks that it only downloads the minimum set of dependencies - // needed in various situations. - // - // This checks several permutations of the different - // host_dep/dev_dep/itarget settings. These 3 are planned to be stabilized - // together, so there isn't much need to be concerned about how the behave - // independently. However, there are some cases where they do behave - // independently. Specifically: - // - // * `cargo test` forces dev_dep decoupling to be disabled. - // * `cargo tree --target=all` forces ignore_inactive_targets off and decouple_dev_deps off. - // * `cargo tree --target=all -e normal` forces ignore_inactive_targets off. - // - // However, `cargo tree` is a little weird because it downloads everything - // anyways. - // - // So to summarize the different permutations: - // - // dev_dep | host_dep | itarget | Notes - // --------|----------|---------|---------------------------- - // | | | -Zfeatures=compare (new resolver should behave same as old) - // | | โœ“ | This scenario should not happen. - // | โœ“ | | `cargo tree --target=all -Zfeatures=all`โ€  - // | โœ“ | โœ“ | `cargo test` - // โœ“ | | | This scenario should not happen. - // โœ“ | | โœ“ | This scenario should not happen. - // โœ“ | โœ“ | | `cargo tree --target=all -e normal -Z features=all`โ€  - // โœ“ | โœ“ | โœ“ | A normal build. - // - // โ€  โ€” However, `cargo tree` downloads everything. 
- Package::new("normal", "1.0.0").publish(); - Package::new("normal_pm", "1.0.0").publish(); - Package::new("normal_opt", "1.0.0").publish(); - Package::new("dev_dep", "1.0.0").publish(); - Package::new("dev_dep_pm", "1.0.0").publish(); - Package::new("build_dep", "1.0.0").publish(); - Package::new("build_dep_pm", "1.0.0").publish(); - Package::new("build_dep_opt", "1.0.0").publish(); - - Package::new("itarget_normal", "1.0.0").publish(); - Package::new("itarget_normal_pm", "1.0.0").publish(); - Package::new("itarget_dev_dep", "1.0.0").publish(); - Package::new("itarget_dev_dep_pm", "1.0.0").publish(); - Package::new("itarget_build_dep", "1.0.0").publish(); - Package::new("itarget_build_dep_pm", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - normal = "1.0" - normal_pm = "1.0" - normal_opt = { version = "1.0", optional = true } - - [dev-dependencies] - dev_dep = "1.0" - dev_dep_pm = "1.0" - - [build-dependencies] - build_dep = "1.0" - build_dep_pm = "1.0" - build_dep_opt = { version = "1.0", optional = true } - - [target.'cfg(whatever)'.dependencies] - itarget_normal = "1.0" - itarget_normal_pm = "1.0" - - [target.'cfg(whatever)'.dev-dependencies] - itarget_dev_dep = "1.0" - itarget_dev_dep_pm = "1.0" - - [target.'cfg(whatever)'.build-dependencies] - itarget_build_dep = "1.0" - itarget_build_dep_pm = "1.0" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - - let clear = || { - cargo_home().join("registry/cache").rm_rf(); - cargo_home().join("registry/src").rm_rf(); - p.build_dir().rm_rf(); - }; - - // none - // Should be the same as `-Zfeatures=all` - p.cargo("check -Zfeatures=compare") - .masquerade_as_nightly_cargo() - .with_stderr_unordered( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] normal_pm v1.0.0 [..] -[DOWNLOADED] normal v1.0.0 [..] -[DOWNLOADED] build_dep_pm v1.0.0 [..] -[DOWNLOADED] build_dep v1.0.0 [..] -[COMPILING] build_dep v1.0.0 -[COMPILING] build_dep_pm v1.0.0 -[CHECKING] normal_pm v1.0.0 -[CHECKING] normal v1.0.0 -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - clear(); - - // New behavior - switch_to_resolver_2(&p); - - // all - p.cargo("check") - .with_stderr_unordered( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] normal_pm v1.0.0 [..] -[DOWNLOADED] normal v1.0.0 [..] -[DOWNLOADED] build_dep_pm v1.0.0 [..] -[DOWNLOADED] build_dep v1.0.0 [..] -[COMPILING] build_dep v1.0.0 -[COMPILING] build_dep_pm v1.0.0 -[CHECKING] normal v1.0.0 -[CHECKING] normal_pm v1.0.0 -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - clear(); - - // This disables decouple_dev_deps. - p.cargo("test --no-run") - .with_stderr_unordered( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] normal_pm v1.0.0 [..] -[DOWNLOADED] normal v1.0.0 [..] -[DOWNLOADED] dev_dep_pm v1.0.0 [..] -[DOWNLOADED] dev_dep v1.0.0 [..] -[DOWNLOADED] build_dep_pm v1.0.0 [..] -[DOWNLOADED] build_dep v1.0.0 [..] -[COMPILING] build_dep v1.0.0 -[COMPILING] build_dep_pm v1.0.0 -[COMPILING] normal_pm v1.0.0 -[COMPILING] normal v1.0.0 -[COMPILING] dev_dep_pm v1.0.0 -[COMPILING] dev_dep v1.0.0 -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - clear(); - - // This disables itarget, but leaves decouple_dev_deps enabled. - p.cargo("tree -e normal --target=all") - .with_stderr_unordered( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] normal v1.0.0 [..] -[DOWNLOADED] normal_pm v1.0.0 [..] -[DOWNLOADED] build_dep v1.0.0 [..] 
-[DOWNLOADED] build_dep_pm v1.0.0 [..] -[DOWNLOADED] itarget_normal v1.0.0 [..] -[DOWNLOADED] itarget_normal_pm v1.0.0 [..] -[DOWNLOADED] itarget_build_dep v1.0.0 [..] -[DOWNLOADED] itarget_build_dep_pm v1.0.0 [..] -", - ) - .with_stdout( - "\ -foo v0.1.0 ([ROOT]/foo) -โ”œโ”€โ”€ itarget_normal v1.0.0 -โ”œโ”€โ”€ itarget_normal_pm v1.0.0 -โ”œโ”€โ”€ normal v1.0.0 -โ””โ”€โ”€ normal_pm v1.0.0 -", - ) - .run(); - clear(); - - // This disables itarget and decouple_dev_deps. - p.cargo("tree --target=all") - .with_stderr_unordered( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] normal_pm v1.0.0 [..] -[DOWNLOADED] normal v1.0.0 [..] -[DOWNLOADED] itarget_normal_pm v1.0.0 [..] -[DOWNLOADED] itarget_normal v1.0.0 [..] -[DOWNLOADED] itarget_dev_dep_pm v1.0.0 [..] -[DOWNLOADED] itarget_dev_dep v1.0.0 [..] -[DOWNLOADED] itarget_build_dep_pm v1.0.0 [..] -[DOWNLOADED] itarget_build_dep v1.0.0 [..] -[DOWNLOADED] dev_dep_pm v1.0.0 [..] -[DOWNLOADED] dev_dep v1.0.0 [..] -[DOWNLOADED] build_dep_pm v1.0.0 [..] -[DOWNLOADED] build_dep v1.0.0 [..] -", - ) - .with_stdout( - "\ -foo v0.1.0 ([ROOT]/foo) -โ”œโ”€โ”€ itarget_normal v1.0.0 -โ”œโ”€โ”€ itarget_normal_pm v1.0.0 -โ”œโ”€โ”€ normal v1.0.0 -โ””โ”€โ”€ normal_pm v1.0.0 -[build-dependencies] -โ”œโ”€โ”€ build_dep v1.0.0 -โ”œโ”€โ”€ build_dep_pm v1.0.0 -โ”œโ”€โ”€ itarget_build_dep v1.0.0 -โ””โ”€โ”€ itarget_build_dep_pm v1.0.0 -[dev-dependencies] -โ”œโ”€โ”€ dev_dep v1.0.0 -โ”œโ”€โ”€ dev_dep_pm v1.0.0 -โ”œโ”€โ”€ itarget_dev_dep v1.0.0 -โ””โ”€โ”€ itarget_dev_dep_pm v1.0.0 -", - ) - .run(); - clear(); -} - -#[cargo_test] -fn pm_with_int_shared() { - // This is a somewhat complex scenario of a proc-macro in a workspace with - // an integration test where the proc-macro is used for other things, and - // *everything* is built at once (`--workspace --all-targets - // --all-features`). There was a bug where the UnitFor settings were being - // incorrectly computed based on the order that the graph was traversed. - // - // There are some uncertainties about exactly how proc-macros should behave - // with `--workspace`, see https://github.com/rust-lang/cargo/issues/8312. - // - // This uses a const-eval hack to do compile-time feature checking. 
- let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "pm", "shared"] - resolver = "2" - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - pm = { path = "../pm" } - shared = { path = "../shared", features = ["norm-feat"] } - "#, - ) - .file( - "foo/src/lib.rs", - r#" - // foo->shared always has both features set - const _CHECK: [(); 0] = [(); 0-!(shared::FEATS==3) as usize]; - "#, - ) - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - - [lib] - proc-macro = true - - [dependencies] - shared = { path = "../shared", features = ["host-feat"] } - "#, - ) - .file( - "pm/src/lib.rs", - r#" - // pm->shared always has just host - const _CHECK: [(); 0] = [(); 0-!(shared::FEATS==1) as usize]; - "#, - ) - .file( - "pm/tests/pm_test.rs", - r#" - // integration test gets both set - const _CHECK: [(); 0] = [(); 0-!(shared::FEATS==3) as usize]; - "#, - ) - .file( - "shared/Cargo.toml", - r#" - [package] - name = "shared" - version = "0.1.0" - - [features] - norm-feat = [] - host-feat = [] - "#, - ) - .file( - "shared/src/lib.rs", - r#" - pub const FEATS: u32 = { - if cfg!(feature="norm-feat") && cfg!(feature="host-feat") { - 3 - } else if cfg!(feature="norm-feat") { - 2 - } else if cfg!(feature="host-feat") { - 1 - } else { - 0 - } - }; - "#, - ) - .build(); - - p.cargo("build --workspace --all-targets --all-features -v") - .with_stderr_unordered( - "\ -[COMPILING] shared [..] -[RUNNING] `rustc --crate-name shared [..]--crate-type lib [..] -[RUNNING] `rustc --crate-name shared [..]--crate-type lib [..] -[RUNNING] `rustc --crate-name shared [..]--test[..] -[COMPILING] pm [..] -[RUNNING] `rustc --crate-name pm [..]--crate-type proc-macro[..] -[RUNNING] `rustc --crate-name pm [..]--test[..] -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name foo [..]--test[..] -[RUNNING] `rustc --crate-name pm_test [..]--test[..] -[RUNNING] `rustc --crate-name foo [..]--crate-type lib[..] -[FINISHED] [..] -", - ) - .run(); - - // And again, should stay fresh. - p.cargo("build --workspace --all-targets --all-features -v") - .with_stderr_unordered( - "\ -[FRESH] shared [..] -[FRESH] pm [..] -[FRESH] foo [..] -[FINISHED] [..]", - ) - .run(); -} - -#[cargo_test] -fn doc_proc_macro() { - // Checks for a bug when documenting a proc-macro with a dependency. The - // doc unit builder was not carrying the "for host" setting through the - // dependencies, and the `pm-dep` dependency was causing a panic because - // it was looking for target features instead of host features. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - resolver = "2" - - [dependencies] - pm = { path = "pm" } - "#, - ) - .file("src/lib.rs", "") - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - - [lib] - proc-macro = true - - [dependencies] - pm-dep = { path = "../pm-dep" } - "#, - ) - .file("pm/src/lib.rs", "") - .file("pm-dep/Cargo.toml", &basic_manifest("pm-dep", "0.1.0")) - .file("pm-dep/src/lib.rs", "") - .build(); - - // Unfortunately this cannot check the output because what it prints is - // nondeterministic. Sometimes it says "Compiling pm-dep" and sometimes - // "Checking pm-dep". This is because it is both building it and checking - // it in parallel (building so it can build the proc-macro, and checking - // so rustdoc can load it). 
- p.cargo("doc").run(); -} - -#[cargo_test] -fn edition_2021_default_2() { - // edition = 2021 defaults to v2 resolver. - Package::new("common", "1.0.0") - .feature("f1", &[]) - .file("src/lib.rs", "") - .publish(); - - Package::new("bar", "1.0.0") - .add_dep( - Dependency::new("common", "1.0") - .target("cfg(whatever)") - .enable_features(&["f1"]), - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - common = "1.0" - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - // First without edition. - p.cargo("tree -f") - .arg("{p} feats:{f}") - .with_stdout( - "\ -foo v0.1.0 [..] -โ”œโ”€โ”€ bar v1.0.0 feats: -โ””โ”€โ”€ common v1.0.0 feats:f1 -", - ) - .run(); - - p.change_file( - "Cargo.toml", - r#" - cargo-features = ["edition2021"] - - [package] - name = "foo" - version = "0.1.0" - edition = "2021" - - [dependencies] - common = "1.0" - bar = "1.0" - "#, - ); - - // Importantly, this does not include `f1` on `common`. - p.cargo("tree -f") - .arg("{p} feats:{f}") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -foo v0.1.0 [..] -โ”œโ”€โ”€ bar v1.0.0 feats: -โ””โ”€โ”€ common v1.0.0 feats: -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/features_namespaced.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/features_namespaced.rs deleted file mode 100644 index 5b5ea47e7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/features_namespaced.rs +++ /dev/null @@ -1,1195 +0,0 @@ -//! Tests for namespaced features. - -use super::features2::switch_to_resolver_2; -use cargo_test_support::registry::{Dependency, Package}; -use cargo_test_support::{project, publish}; - -#[cargo_test] -fn gated() { - // Need namespaced-features to use `dep:` syntax. - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional = true } - - [features] - foo = ["dep:bar"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` - -Caused by: - namespaced features with the `dep:` prefix are only allowed on the nightly channel \ - and requires the `-Z namespaced-features` flag on the command-line -", - ) - .run(); -} - -#[cargo_test] -fn dependency_gate_ignored() { - // Dependencies with `dep:` features are ignored in the registry if not on nightly. - Package::new("baz", "1.0.0").publish(); - Package::new("bar", "1.0.0") - .add_dep(Dependency::new("baz", "1.0").optional(true)) - .feature("feat", &["dep:baz"]) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[ERROR] no matching package named `bar` found -location searched: registry `crates-io` -required by package `foo v0.1.0 ([..]/foo)` -", - ) - .run(); - - // Publish a version without namespaced features, it should ignore 1.0.0 - // and use this instead. - Package::new("bar", "1.0.1") - .add_dep(Dependency::new("baz", "1.0").optional(true)) - .feature("feat", &["baz"]) - .publish(); - p.cargo("check") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... 
-[DOWNLOADED] bar [..] -[CHECKING] bar v1.0.1 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn dependency_with_crate_syntax() { - // Registry dependency uses dep: syntax. - Package::new("baz", "1.0.0").publish(); - Package::new("bar", "1.0.0") - .add_dep(Dependency::new("baz", "1.0").optional(true)) - .feature("feat", &["dep:baz"]) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = {version="1.0", features=["feat"]} - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] [..] -[DOWNLOADED] [..] -[CHECKING] baz v1.0.0 -[CHECKING] bar v1.0.0 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn namespaced_invalid_feature() { - // Specifies a feature that doesn't exist. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - bar = ["baz"] - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - feature `bar` includes `baz` which is neither a dependency nor another feature -", - ) - .run(); -} - -#[cargo_test] -fn namespaced_invalid_dependency() { - // Specifies a dep:name that doesn't exist. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [features] - bar = ["dep:baz"] - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - feature `bar` includes `dep:baz`, but `baz` is not listed as a dependency -", - ) - .run(); -} - -#[cargo_test] -fn namespaced_non_optional_dependency() { - // Specifies a dep:name for a dependency that is not optional. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [features] - bar = ["dep:baz"] - - [dependencies] - baz = "0.1" - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - feature `bar` includes `dep:baz`, but `baz` is not an optional dependency - A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition. -", - ) - .run(); -} - -#[cargo_test] -fn namespaced_implicit_feature() { - // Backwards-compatible with old syntax. - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [features] - bar = ["baz"] - - [dependencies] - baz = { version = "0.1", optional = true } - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("check -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[CHECKING] foo v0.0.1 [..] -[FINISHED] [..] -", - ) - .run(); - p.cargo("check -Z namespaced-features --features baz") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] baz v0.1.0 [..] -[CHECKING] baz v0.1.0 -[CHECKING] foo v0.0.1 [..] -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn namespaced_shadowed_dep() { - // An optional dependency is not listed in the features table, and its - // implicit feature is overridden. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [features] - baz = [] - - [dependencies] - baz = { version = "0.1", optional = true } - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - optional dependency `baz` is not included in any feature - Make sure that `dep:baz` is included in one of features in the [features] table. -", - ) - .run(); -} - -#[cargo_test] -fn namespaced_shadowed_non_optional() { - // Able to specify a feature with the same name as a required dependency. - Package::new("baz", "0.1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [features] - baz = [] - - [dependencies] - baz = "0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check -Z namespaced-features") - .masquerade_as_nightly_cargo() - .run(); -} - -#[cargo_test] -fn namespaced_implicit_non_optional() { - // Includes a non-optional dependency in [features] table. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [features] - bar = ["baz"] - - [dependencies] - baz = "0.1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -Z namespaced-features").masquerade_as_nightly_cargo().with_status(101).with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - feature `bar` includes `baz`, but `baz` is not an optional dependency - A non-optional dependency of the same name is defined; consider adding `optional = true` to its definition. -", - ).run(); -} - -#[cargo_test] -fn namespaced_same_name() { - // Explicitly listing an optional dependency in the [features] table. - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [features] - baz = ["dep:baz"] - - [dependencies] - baz = { version = "0.1", optional = true } - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - if cfg!(feature="baz") { println!("baz"); } - } - "#, - ) - .build(); - - p.cargo("run -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[COMPILING] foo v0.0.1 [..] -[FINISHED] [..] -[RUNNING] [..] -", - ) - .with_stdout("") - .run(); - - p.cargo("run -Z namespaced-features --features baz") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] baz v0.1.0 [..] -[COMPILING] baz v0.1.0 -[COMPILING] foo v0.0.1 [..] -[FINISHED] [..] -[RUNNING] [..] -", - ) - .with_stdout("baz") - .run(); -} - -#[cargo_test] -fn no_implicit_feature() { - // Using `dep:` will not create an implicit feature. 
- Package::new("regex", "1.0.0").publish(); - Package::new("lazy_static", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - regex = { version = "1.0", optional = true } - lazy_static = { version = "1.0", optional = true } - - [features] - regex = ["dep:regex", "dep:lazy_static"] - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - if cfg!(feature = "regex") { println!("regex"); } - if cfg!(feature = "lazy_static") { println!("lazy_static"); } - } - "#, - ) - .build(); - - p.cargo("run -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -[RUNNING] `target/debug/foo[EXE]` -", - ) - .with_stdout("") - .run(); - - p.cargo("run -Z namespaced-features --features regex") - .masquerade_as_nightly_cargo() - .with_stderr_unordered( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] regex v1.0.0 [..] -[DOWNLOADED] lazy_static v1.0.0 [..] -[COMPILING] regex v1.0.0 -[COMPILING] lazy_static v1.0.0 -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -[RUNNING] `target/debug/foo[EXE]` -", - ) - .with_stdout("regex") - .run(); - - p.cargo("run -Z namespaced-features --features lazy_static") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[ERROR] Package `foo v0.1.0 [..]` does not have feature `lazy_static`. \ -It has an optional dependency with that name, but that dependency uses the \"dep:\" \ -syntax in the features table, so it does not have an implicit feature with that name. -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn crate_syntax_bad_name() { - // "dep:bar" = [] - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version="1.0", optional=true } - - [features] - "dep:bar" = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check -Z namespaced-features --features dep:bar") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at [..]/foo/Cargo.toml` - -Caused by: - feature named `dep:bar` is not allowed to start with `dep:` -", - ) - .run(); -} - -#[cargo_test] -fn crate_syntax_in_dep() { - // features = ["dep:baz"] - Package::new("baz", "1.0.0").publish(); - Package::new("bar", "1.0.0") - .add_dep(Dependency::new("baz", "1.0").optional(true)) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", features = ["dep:baz"] } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[CWD]/Cargo.toml` - -Caused by: - feature `dep:baz` in dependency `bar` is not allowed to use explicit `dep:` syntax - If you want to enable [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn crate_syntax_cli() { - // --features dep:bar - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional=true } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check -Z namespaced-features --features dep:bar") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] feature `dep:bar` is not allowed to use explicit `dep:` syntax -", - ) - .run(); - - switch_to_resolver_2(&p); - p.cargo("check -Z namespaced-features --features dep:bar") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] feature `dep:bar` is not allowed to use explicit `dep:` syntax -", - ) - .run(); -} - -#[cargo_test] -fn crate_required_features() { - // required-features = ["dep:bar"] - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional=true } - - [[bin]] - name = "foo" - required-features = ["dep:bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("check -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[ERROR] invalid feature `dep:bar` in required-features of target `foo`: \ -`dep:` prefixed feature values are not allowed in required-features -", - ) - .run(); -} - -#[cargo_test] -fn json_exposed() { - // Checks that the implicit dep: values are exposed in JSON. - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional=true } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("metadata -Z namespaced-features --no-deps") - .masquerade_as_nightly_cargo() - .with_json( - r#" - { - "packages": [ - { - "name": "foo", - "version": "0.1.0", - "id": "foo 0.1.0 [..]", - "license": null, - "license_file": null, - "description": null, - "homepage": null, - "documentation": null, - "source": null, - "dependencies": "{...}", - "targets": "{...}", - "features": { - "bar": ["dep:bar"] - }, - "manifest_path": "[..]foo/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "edition": "2015", - "links": null - } - ], - "workspace_members": "{...}", - "resolve": null, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]foo", - "metadata": null - } - "#, - ) - .run(); -} - -#[cargo_test] -fn crate_feature_with_explicit() { - // crate_name/feat_name syntax where crate_name already has a feature defined. - // NOTE: I don't know if this is actually ideal behavior. 
- Package::new("bar", "1.0.0") - .feature("bar_feat", &[]) - .file( - "src/lib.rs", - r#" - #[cfg(not(feature="bar_feat"))] - compile_error!("bar_feat is not enabled"); - "#, - ) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version="1.0", optional = true } - - [features] - f1 = ["bar/bar_feat"] - bar = ["dep:bar", "f2"] - f2 = [] - "#, - ) - .file( - "src/lib.rs", - r#" - #[cfg(not(feature="bar"))] - compile_error!("bar should be enabled"); - - #[cfg(not(feature="f2"))] - compile_error!("f2 should be enabled"); - "#, - ) - .build(); - - p.cargo("check -Z namespaced-features --features f1") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v1.0.0 [..] -[CHECKING] bar v1.0.0 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn optional_explicit_without_crate() { - // "feat" syntax when there is no implicit "feat" feature because it is - // explicitly listed elsewhere. - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional = true } - - [features] - feat1 = ["dep:bar"] - feat2 = ["bar"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at [..] - -Caused by: - feature `feat2` includes `bar`, but `bar` is an optional dependency without an implicit feature - Use `dep:bar` to enable the dependency. -", - ) - .run(); -} - -#[cargo_test] -fn tree() { - Package::new("baz", "1.0.0").publish(); - Package::new("bar", "1.0.0") - .add_dep(Dependency::new("baz", "1.0").optional(true)) - .feature("feat1", &["dep:baz"]) - .feature("feat2", &[]) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", features = ["feat1"], optional=true } - - [features] - a = ["bar/feat2"] - bar = ["dep:bar"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree -e features -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stdout("foo v0.1.0 ([ROOT]/foo)") - .run(); - - p.cargo("tree -e features --features a -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -foo v0.1.0 ([ROOT]/foo) -โ”œโ”€โ”€ bar feature \"default\" -โ”‚ โ””โ”€โ”€ bar v1.0.0 -โ”‚ โ””โ”€โ”€ baz feature \"default\" -โ”‚ โ””โ”€โ”€ baz v1.0.0 -โ””โ”€โ”€ bar feature \"feat1\" - โ””โ”€โ”€ bar v1.0.0 (*) -", - ) - .run(); - - p.cargo("tree -e features --features a -i bar -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -bar v1.0.0 -โ”œโ”€โ”€ bar feature \"default\" -โ”‚ โ””โ”€โ”€ foo v0.1.0 ([ROOT]/foo) -โ”‚ โ”œโ”€โ”€ foo feature \"a\" (command-line) -โ”‚ โ”œโ”€โ”€ foo feature \"bar\" -โ”‚ โ”‚ โ””โ”€โ”€ foo feature \"a\" (command-line) -โ”‚ โ””โ”€โ”€ foo feature \"default\" (command-line) -โ”œโ”€โ”€ bar feature \"feat1\" -โ”‚ โ””โ”€โ”€ foo v0.1.0 ([ROOT]/foo) (*) -โ””โ”€โ”€ bar feature \"feat2\" - โ””โ”€โ”€ foo feature \"a\" (command-line) -", - ) - .run(); - - p.cargo("tree -e features --features bar -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -foo v0.1.0 ([ROOT]/foo) -โ”œโ”€โ”€ bar feature \"default\" -โ”‚ โ””โ”€โ”€ bar v1.0.0 -โ”‚ โ””โ”€โ”€ baz feature \"default\" -โ”‚ 
โ””โ”€โ”€ baz v1.0.0 -โ””โ”€โ”€ bar feature \"feat1\" - โ””โ”€โ”€ bar v1.0.0 (*) -", - ) - .run(); - - p.cargo("tree -e features --features bar -i bar -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -bar v1.0.0 -โ”œโ”€โ”€ bar feature \"default\" -โ”‚ โ””โ”€โ”€ foo v0.1.0 ([ROOT]/foo) -โ”‚ โ”œโ”€โ”€ foo feature \"bar\" (command-line) -โ”‚ โ””โ”€โ”€ foo feature \"default\" (command-line) -โ””โ”€โ”€ bar feature \"feat1\" - โ””โ”€โ”€ foo v0.1.0 ([ROOT]/foo) (*) -", - ) - .run(); -} - -#[cargo_test] -fn tree_no_implicit() { - // tree without an implicit feature - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional=true } - - [features] - a = ["dep:bar"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree -e features -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stdout("foo v0.1.0 ([ROOT]/foo)") - .run(); - - p.cargo("tree -e features --all-features -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -foo v0.1.0 ([ROOT]/foo) -โ””โ”€โ”€ bar feature \"default\" - โ””โ”€โ”€ bar v1.0.0 -", - ) - .run(); - - p.cargo("tree -e features -i bar --all-features -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -bar v1.0.0 -โ””โ”€โ”€ bar feature \"default\" - โ””โ”€โ”€ foo v0.1.0 ([ROOT]/foo) - โ””โ”€โ”€ foo feature \"a\" (command-line) -", - ) - .run(); -} - -#[cargo_test] -fn publish_no_implicit() { - // Does not include implicit features or dep: syntax on publish. - Package::new("opt-dep1", "1.0.0").publish(); - Package::new("opt-dep2", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - description = "foo" - license = "MIT" - homepage = "https://example.com/" - - [dependencies] - opt-dep1 = { version = "1.0", optional = true } - opt-dep2 = { version = "1.0", optional = true } - - [features] - feat = ["opt-dep1"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify --token sekrit") - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.1.0 [..] -[UPLOADING] foo v0.1.0 [..] 
-", - ) - .run(); - - publish::validate_upload_with_contents( - r#" - { - "authors": [], - "badges": {}, - "categories": [], - "deps": [ - { - "default_features": true, - "features": [], - "kind": "normal", - "name": "opt-dep1", - "optional": true, - "registry": "https://github.com/rust-lang/crates.io-index", - "target": null, - "version_req": "^1.0" - }, - { - "default_features": true, - "features": [], - "kind": "normal", - "name": "opt-dep2", - "optional": true, - "registry": "https://github.com/rust-lang/crates.io-index", - "target": null, - "version_req": "^1.0" - } - ], - "description": "foo", - "documentation": null, - "features": { - "feat": ["opt-dep1"] - }, - "homepage": "https://example.com/", - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": null, - "vers": "0.1.0" - } - "#, - "foo-0.1.0.crate", - &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], - &[( - "Cargo.toml", - &format!( - r#"{} -[package] -name = "foo" -version = "0.1.0" -description = "foo" -homepage = "https://example.com/" -license = "MIT" -[dependencies.opt-dep1] -version = "1.0" -optional = true - -[dependencies.opt-dep2] -version = "1.0" -optional = true - -[features] -feat = ["opt-dep1"] -"#, - cargo::core::package::MANIFEST_PREAMBLE - ), - )], - ); -} - -#[cargo_test] -fn publish() { - // Publish behavior with explicit dep: syntax. - Package::new("bar", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - description = "foo" - license = "MIT" - homepage = "https://example.com/" - - [dependencies] - bar = { version = "1.0", optional = true } - - [features] - feat1 = [] - feat2 = ["dep:bar"] - feat3 = ["feat2"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --token sekrit -Z namespaced-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.1.0 [..] -[VERIFYING] foo v0.1.0 [..] -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -[UPLOADING] foo v0.1.0 [..] -", - ) - .run(); - - publish::validate_upload_with_contents( - r#" - { - "authors": [], - "badges": {}, - "categories": [], - "deps": [ - { - "default_features": true, - "features": [], - "kind": "normal", - "name": "bar", - "optional": true, - "registry": "https://github.com/rust-lang/crates.io-index", - "target": null, - "version_req": "^1.0" - } - ], - "description": "foo", - "documentation": null, - "features": { - "feat1": [], - "feat2": ["dep:bar"], - "feat3": ["feat2"] - }, - "homepage": "https://example.com/", - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": null, - "vers": "0.1.0" - } - "#, - "foo-0.1.0.crate", - &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], - &[( - "Cargo.toml", - &format!( - r#"{} -[package] -name = "foo" -version = "0.1.0" -description = "foo" -homepage = "https://example.com/" -license = "MIT" -[dependencies.bar] -version = "1.0" -optional = true - -[features] -feat1 = [] -feat2 = ["dep:bar"] -feat3 = ["feat2"] -"#, - cargo::core::package::MANIFEST_PREAMBLE - ), - )], - ); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/fetch.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/fetch.rs deleted file mode 100644 index f90131a59..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/fetch.rs +++ /dev/null @@ -1,135 +0,0 @@ -//! 
Tests for the `cargo fetch` command. - -use cargo_test_support::registry::Package; -use cargo_test_support::rustc_host; -use cargo_test_support::{basic_manifest, cross_compile, project}; - -#[cargo_test] -fn no_deps() { - let p = project() - .file("src/main.rs", "mod a; fn main() {}") - .file("src/a.rs", "") - .build(); - - p.cargo("fetch").with_stdout("").run(); -} - -#[cargo_test] -fn fetch_all_platform_dependencies_when_no_target_is_given() { - if cross_compile::disabled() { - return; - } - - Package::new("d1", "1.2.3") - .file("Cargo.toml", &basic_manifest("d1", "1.2.3")) - .file("src/lib.rs", "") - .publish(); - - Package::new("d2", "0.1.2") - .file("Cargo.toml", &basic_manifest("d2", "0.1.2")) - .file("src/lib.rs", "") - .publish(); - - let target = cross_compile::alternate(); - let host = rustc_host(); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [target.{host}.dependencies] - d1 = "1.2.3" - - [target.{target}.dependencies] - d2 = "0.1.2" - "#, - host = host, - target = target - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("fetch") - .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]") - .with_stderr_contains("[DOWNLOADED] d2 v0.1.2 [..]") - .run(); -} - -#[cargo_test] -fn fetch_platform_specific_dependencies() { - if cross_compile::disabled() { - return; - } - - Package::new("d1", "1.2.3") - .file("Cargo.toml", &basic_manifest("d1", "1.2.3")) - .file("src/lib.rs", "") - .publish(); - - Package::new("d2", "0.1.2") - .file("Cargo.toml", &basic_manifest("d2", "0.1.2")) - .file("src/lib.rs", "") - .publish(); - - let target = cross_compile::alternate(); - let host = rustc_host(); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [target.{host}.dependencies] - d1 = "1.2.3" - - [target.{target}.dependencies] - d2 = "0.1.2" - "#, - host = host, - target = target - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("fetch --target") - .arg(&host) - .with_stderr_contains("[DOWNLOADED] d1 v1.2.3 [..]") - .with_stderr_does_not_contain("[DOWNLOADED] d2 v0.1.2 [..]") - .run(); - - p.cargo("fetch --target") - .arg(&target) - .with_stderr_contains("[DOWNLOADED] d2 v0.1.2[..]") - .with_stderr_does_not_contain("[DOWNLOADED] d1 v1.2.3 [..]") - .run(); -} - -#[cargo_test] -fn fetch_warning() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - misspelled = "wut" - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("fetch") - .with_stderr("[WARNING] unused manifest key: package.misspelled") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/fix.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/fix.rs deleted file mode 100644 index fce7ab843..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/fix.rs +++ /dev/null @@ -1,1825 +0,0 @@ -//! Tests for the `cargo fix` command. 
- -use cargo::core::Edition; -use cargo_test_support::compare::assert_match_exact; -use cargo_test_support::git; -use cargo_test_support::paths::{self, CargoPathExt}; -use cargo_test_support::registry::{Dependency, Package}; -use cargo_test_support::tools; -use cargo_test_support::{basic_manifest, is_nightly, project}; - -#[cargo_test] -fn do_not_fix_broken_builds() { - let p = project() - .file( - "src/lib.rs", - r#" - pub fn foo() { - let mut x = 3; - drop(x); - } - - pub fn foo2() { - let _x: u32 = "a"; - } - "#, - ) - .build(); - - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .with_status(101) - .with_stderr_contains("[ERROR] could not compile `foo` due to previous error") - .run(); - assert!(p.read_file("src/lib.rs").contains("let mut x = 3;")); -} - -#[cargo_test] -fn fix_broken_if_requested() { - let p = project() - .file( - "src/lib.rs", - r#" - fn foo(a: &u32) -> u32 { a + 1 } - pub fn bar() { - foo(1); - } - "#, - ) - .build(); - - p.cargo("fix --allow-no-vcs --broken-code") - .env("__CARGO_FIX_YOLO", "1") - .run(); -} - -#[cargo_test] -fn broken_fixes_backed_out() { - // This works as follows: - // - Create a `rustc` shim (the "foo" project) which will pretend that the - // verification step fails. - // - There is an empty build script so `foo` has `OUT_DIR` to track the steps. - // - The first "check", `foo` creates a file in OUT_DIR, and it completes - // successfully with a warning diagnostic to remove unused `mut`. - // - rustfix removes the `mut`. - // - The second "check" to verify the changes, `foo` swaps out the content - // with something that fails to compile. It creates a second file so it - // won't do anything in the third check. - // - cargo fix discovers that the fix failed, and it backs out the changes. - // - The third "check" is done to display the original diagnostics of the - // original code. - let p = project() - .file( - "foo/Cargo.toml", - r#" - [package] - name = 'foo' - version = '0.1.0' - [workspace] - "#, - ) - .file( - "foo/src/main.rs", - r#" - use std::env; - use std::fs; - use std::io::Write; - use std::path::{Path, PathBuf}; - use std::process::{self, Command}; - - fn main() { - // Ignore calls to things like --print=file-names and compiling build.rs. 
- let is_lib_rs = env::args_os() - .map(PathBuf::from) - .any(|l| l == Path::new("src/lib.rs")); - if is_lib_rs { - let path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); - let first = path.join("first"); - let second = path.join("second"); - if first.exists() && !second.exists() { - fs::write("src/lib.rs", b"not rust code").unwrap(); - fs::File::create(&second).unwrap(); - } else { - fs::File::create(&first).unwrap(); - } - } - - let status = Command::new("rustc") - .args(env::args().skip(1)) - .status() - .expect("failed to run rustc"); - process::exit(status.code().unwrap_or(2)); - } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [package] - name = 'bar' - version = '0.1.0' - [workspace] - "#, - ) - .file("bar/build.rs", "fn main() {}") - .file( - "bar/src/lib.rs", - r#" - pub fn foo() { - let mut x = 3; - drop(x); - } - "#, - ) - .build(); - - // Build our rustc shim - p.cargo("build").cwd("foo").run(); - - // Attempt to fix code, but our shim will always fail the second compile - p.cargo("fix --allow-no-vcs --lib") - .cwd("bar") - .env("__CARGO_FIX_YOLO", "1") - .env("RUSTC", p.root().join("foo/target/debug/foo")) - .with_stderr_contains( - "warning: failed to automatically apply fixes suggested by rustc \ - to crate `bar`\n\ - \n\ - after fixes were automatically applied the compiler reported \ - errors within these files:\n\ - \n \ - * src/lib.rs\n\ - \n\ - This likely indicates a bug in either rustc or cargo itself,\n\ - and we would appreciate a bug report! You're likely to see \n\ - a number of compiler warnings after this message which cargo\n\ - attempted to fix but failed. If you could open an issue at\n\ - [..]\n\ - quoting the full output of this command we'd be very appreciative!\n\ - Note that you may be able to make some more progress in the near-term\n\ - fixing code with the `--broken-code` flag\n\ - \n\ - The following errors were reported:\n\ - error: expected one of `!` or `::`, found `rust`\n\ - ", - ) - .with_stderr_contains("Original diagnostics will follow.") - .with_stderr_contains("[WARNING] variable does not need to be mutable") - .with_stderr_does_not_contain("[..][FIXED][..]") - .run(); - - // Make sure the fix which should have been applied was backed out - assert!(p.read_file("bar/src/lib.rs").contains("let mut x = 3;")); -} - -#[cargo_test] -fn fix_path_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = 'bar' } - - [workspace] - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate bar; - - pub fn foo() -> u32 { - let mut x = 3; - x - } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file( - "bar/src/lib.rs", - r#" - pub fn foo() -> u32 { - let mut x = 3; - x - } - "#, - ) - .build(); - - p.cargo("fix --allow-no-vcs -p foo -p bar") - .env("__CARGO_FIX_YOLO", "1") - .with_stdout("") - .with_stderr_unordered( - "\ -[CHECKING] bar v0.1.0 ([..]) -[FIXED] bar/src/lib.rs (1 fix) -[CHECKING] foo v0.1.0 ([..]) -[FIXED] src/lib.rs (1 fix) -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn do_not_fix_non_relevant_deps() { - let p = project() - .no_manifest() - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = '../bar' } - - [workspace] - "#, - ) - .file("foo/src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file( - "bar/src/lib.rs", - r#" - pub fn foo() -> u32 { - let mut x = 3; - x - } - "#, - ) - .build(); - - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .cwd("foo") - .run(); - - assert!(p.read_file("bar/src/lib.rs").contains("mut")); -} - -#[cargo_test] -fn prepare_for_2018() { - let p = project() - .file( - "src/lib.rs", - r#" - #![allow(unused)] - - mod foo { - pub const FOO: &str = "fooo"; - } - - mod bar { - use ::foo::FOO; - } - - fn main() { - let x = ::foo::FOO; - } - "#, - ) - .build(); - - let stderr = "\ -[CHECKING] foo v0.0.1 ([..]) -[MIGRATING] src/lib.rs from 2015 edition to 2018 -[FIXED] src/lib.rs (2 fixes) -[FINISHED] [..] -"; - p.cargo("fix --edition --allow-no-vcs") - .with_stderr(stderr) - .with_stdout("") - .run(); - - println!("{}", p.read_file("src/lib.rs")); - assert!(p.read_file("src/lib.rs").contains("use crate::foo::FOO;")); - assert!(p - .read_file("src/lib.rs") - .contains("let x = crate::foo::FOO;")); -} - -#[cargo_test] -fn local_paths() { - let p = project() - .file( - "src/lib.rs", - r#" - use test::foo; - - mod test { - pub fn foo() {} - } - - pub fn f() { - foo(); - } - "#, - ) - .build(); - - p.cargo("fix --edition --allow-no-vcs") - .with_stderr( - "\ -[CHECKING] foo v0.0.1 ([..]) -[MIGRATING] src/lib.rs from 2015 edition to 2018 -[FIXED] src/lib.rs (1 fix) -[FINISHED] [..] -", - ) - .with_stdout("") - .run(); - - println!("{}", p.read_file("src/lib.rs")); - assert!(p.read_file("src/lib.rs").contains("use crate::test::foo;")); -} - -#[cargo_test] -fn upgrade_extern_crate() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = '2018' - - [workspace] - - [dependencies] - bar = { path = 'bar' } - "#, - ) - .file( - "src/lib.rs", - r#" - #![warn(rust_2018_idioms)] - extern crate bar; - - use bar::bar; - - pub fn foo() { - ::bar::bar(); - bar(); - } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - let stderr = "\ -[CHECKING] bar v0.1.0 ([..]) -[CHECKING] foo v0.1.0 ([..]) -[FIXED] src/lib.rs (1 fix) -[FINISHED] [..] -"; - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .with_stderr(stderr) - .with_stdout("") - .run(); - println!("{}", p.read_file("src/lib.rs")); - assert!(!p.read_file("src/lib.rs").contains("extern crate")); -} - -#[cargo_test] -fn specify_rustflags() { - let p = project() - .file( - "src/lib.rs", - r#" - #![allow(unused)] - - mod foo { - pub const FOO: &str = "fooo"; - } - - fn main() { - let x = ::foo::FOO; - } - "#, - ) - .build(); - - p.cargo("fix --edition --allow-no-vcs") - .env("RUSTFLAGS", "-C linker=cc") - .with_stderr( - "\ -[CHECKING] foo v0.0.1 ([..]) -[MIGRATING] src/lib.rs from 2015 edition to 2018 -[FIXED] src/lib.rs (1 fix) -[FINISHED] [..] -", - ) - .with_stdout("") - .run(); -} - -#[cargo_test] -fn no_changes_necessary() { - let p = project().file("src/lib.rs", "").build(); - - let stderr = "\ -[CHECKING] foo v0.0.1 ([..]) -[FINISHED] [..] 
-"; - p.cargo("fix --allow-no-vcs") - .with_stderr(stderr) - .with_stdout("") - .run(); -} - -#[cargo_test] -fn fixes_extra_mut() { - let p = project() - .file( - "src/lib.rs", - r#" - pub fn foo() -> u32 { - let mut x = 3; - x - } - "#, - ) - .build(); - - let stderr = "\ -[CHECKING] foo v0.0.1 ([..]) -[FIXED] src/lib.rs (1 fix) -[FINISHED] [..] -"; - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .with_stderr(stderr) - .with_stdout("") - .run(); -} - -#[cargo_test] -fn fixes_two_missing_ampersands() { - let p = project() - .file( - "src/lib.rs", - r#" - pub fn foo() -> u32 { - let mut x = 3; - let mut y = 3; - x + y - } - "#, - ) - .build(); - - let stderr = "\ -[CHECKING] foo v0.0.1 ([..]) -[FIXED] src/lib.rs (2 fixes) -[FINISHED] [..] -"; - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .with_stderr(stderr) - .with_stdout("") - .run(); -} - -#[cargo_test] -fn tricky() { - let p = project() - .file( - "src/lib.rs", - r#" - pub fn foo() -> u32 { - let mut x = 3; let mut y = 3; - x + y - } - "#, - ) - .build(); - - let stderr = "\ -[CHECKING] foo v0.0.1 ([..]) -[FIXED] src/lib.rs (2 fixes) -[FINISHED] [..] -"; - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .with_stderr(stderr) - .with_stdout("") - .run(); -} - -#[cargo_test] -fn preserve_line_endings() { - let p = project() - .file( - "src/lib.rs", - "fn add(a: &u32) -> u32 { a + 1 }\r\n\ - pub fn foo() -> u32 { let mut x = 3; add(&x) }\r\n\ - ", - ) - .build(); - - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .run(); - assert!(p.read_file("src/lib.rs").contains("\r\n")); -} - -#[cargo_test] -fn fix_deny_warnings() { - let p = project() - .file( - "src/lib.rs", - "#![deny(warnings)] - pub fn foo() { let mut x = 3; drop(x); } - ", - ) - .build(); - - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .run(); -} - -#[cargo_test] -fn fix_deny_warnings_but_not_others() { - let p = project() - .file( - "src/lib.rs", - " - #![deny(unused_mut)] - - pub fn foo() -> u32 { - let mut x = 3; - x - } - - pub fn bar() { - #[allow(unused_mut)] - let mut _y = 4; - } - ", - ) - .build(); - - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .run(); - assert!(!p.read_file("src/lib.rs").contains("let mut x = 3;")); - assert!(p.read_file("src/lib.rs").contains("let mut _y = 4;")); -} - -#[cargo_test] -fn fix_two_files() { - let p = project() - .file( - "src/lib.rs", - " - pub mod bar; - - pub fn foo() -> u32 { - let mut x = 3; - x - } - ", - ) - .file( - "src/bar.rs", - " - pub fn foo() -> u32 { - let mut x = 3; - x - } - - ", - ) - .build(); - - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .with_stderr_contains("[FIXED] src/bar.rs (1 fix)") - .with_stderr_contains("[FIXED] src/lib.rs (1 fix)") - .run(); - assert!(!p.read_file("src/lib.rs").contains("let mut x = 3;")); - assert!(!p.read_file("src/bar.rs").contains("let mut x = 3;")); -} - -#[cargo_test] -fn fixes_missing_ampersand() { - let p = project() - .file("src/main.rs", "fn main() { let mut x = 3; drop(x); }") - .file( - "src/lib.rs", - r#" - pub fn foo() { let mut x = 3; drop(x); } - - #[test] - pub fn foo2() { let mut x = 3; drop(x); } - "#, - ) - .file( - "tests/a.rs", - r#" - #[test] - pub fn foo() { let mut x = 3; drop(x); } - "#, - ) - .file("examples/foo.rs", "fn main() { let mut x = 3; drop(x); }") - .file("build.rs", "fn main() { let mut x = 3; drop(x); }") - .build(); - - p.cargo("fix --all-targets --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .with_stdout("") - 
.with_stderr_contains("[COMPILING] foo v0.0.1 ([..])") - .with_stderr_contains("[FIXED] build.rs (1 fix)") - // Don't assert number of fixes for this one, as we don't know if we're - // fixing it once or twice! We run this all concurrently, and if we - // compile (and fix) in `--test` mode first, we get two fixes. Otherwise - // we'll fix one non-test thing, and then fix another one later in - // test mode. - .with_stderr_contains("[FIXED] src/lib.rs[..]") - .with_stderr_contains("[FIXED] src/main.rs (1 fix)") - .with_stderr_contains("[FIXED] examples/foo.rs (1 fix)") - .with_stderr_contains("[FIXED] tests/a.rs (1 fix)") - .with_stderr_contains("[FINISHED] [..]") - .run(); - p.cargo("build").run(); - p.cargo("test").run(); -} - -#[cargo_test] -fn fix_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [features] - bar = [] - - [workspace] - "#, - ) - .file( - "src/lib.rs", - r#" - #[cfg(feature = "bar")] - pub fn foo() -> u32 { let mut x = 3; x } - "#, - ) - .build(); - - p.cargo("fix --allow-no-vcs").run(); - p.cargo("build").run(); - p.cargo("fix --features bar --allow-no-vcs").run(); - p.cargo("build --features bar").run(); -} - -#[cargo_test] -fn shows_warnings() { - let p = project() - .file( - "src/lib.rs", - "#[deprecated] fn bar() {} pub fn foo() { let _ = bar(); }", - ) - .build(); - - p.cargo("fix --allow-no-vcs") - .with_stderr_contains("[..]warning: use of deprecated[..]") - .run(); -} - -#[cargo_test] -fn warns_if_no_vcs_detected() { - let p = project().file("src/lib.rs", "pub fn foo() {}").build(); - - p.cargo("fix") - .with_status(101) - .with_stderr( - "error: no VCS found for this package and `cargo fix` can potentially perform \ - destructive changes; if you'd like to suppress this error pass `--allow-no-vcs`\ - ", - ) - .run(); - p.cargo("fix --allow-no-vcs").run(); -} - -#[cargo_test] -fn warns_about_dirty_working_directory() { - let p = git::new("foo", |p| p.file("src/lib.rs", "pub fn foo() {}")); - - p.change_file("src/lib.rs", ""); - - p.cargo("fix") - .with_status(101) - .with_stderr( - "\ -error: the working directory of this package has uncommitted changes, \ -and `cargo fix` can potentially perform destructive changes; if you'd \ -like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \ -commit the changes to these files: - - * src/lib.rs (dirty) - - -", - ) - .run(); - p.cargo("fix --allow-dirty").run(); -} - -#[cargo_test] -fn warns_about_staged_working_directory() { - let (p, repo) = git::new_repo("foo", |p| p.file("src/lib.rs", "pub fn foo() {}")); - - p.change_file("src/lib.rs", "pub fn bar() {}"); - git::add(&repo); - - p.cargo("fix") - .with_status(101) - .with_stderr( - "\ -error: the working directory of this package has uncommitted changes, \ -and `cargo fix` can potentially perform destructive changes; if you'd \ -like to suppress this error pass `--allow-dirty`, `--allow-staged`, or \ -commit the changes to these files: - - * src/lib.rs (staged) - - -", - ) - .run(); - p.cargo("fix --allow-staged").run(); -} - -#[cargo_test] -fn does_not_warn_about_clean_working_directory() { - let p = git::new("foo", |p| p.file("src/lib.rs", "pub fn foo() {}")); - p.cargo("fix").run(); -} - -#[cargo_test] -fn does_not_warn_about_dirty_ignored_files() { - let p = git::new("foo", |p| { - p.file("src/lib.rs", "pub fn foo() {}") - .file(".gitignore", "bar\n") - }); - - p.change_file("bar", ""); - - p.cargo("fix").run(); -} - -#[cargo_test] -fn fix_all_targets_by_default() { - let p = project() 
- .file("src/lib.rs", "pub fn foo() { let mut x = 3; drop(x); }") - .file("tests/foo.rs", "pub fn foo() { let mut x = 3; drop(x); }") - .build(); - p.cargo("fix --allow-no-vcs") - .env("__CARGO_FIX_YOLO", "1") - .run(); - assert!(!p.read_file("src/lib.rs").contains("let mut x")); - assert!(!p.read_file("tests/foo.rs").contains("let mut x")); -} - -#[cargo_test] -fn prepare_for_unstable() { - // During the period where a new edition is coming up, but not yet stable, - // this test will verify that it cannot be migrated to on stable. If there - // is no next edition, it does nothing. - let next = match Edition::LATEST_UNSTABLE { - Some(next) => next, - None => { - eprintln!("Next edition is currently not available, skipping test."); - return; - } - }; - let latest_stable = Edition::LATEST_STABLE; - let prev = latest_stable.previous().unwrap(); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "{}" - "#, - latest_stable - ), - ) - .file("src/lib.rs", "") - .build(); - - // -j1 to make the error more deterministic (otherwise there can be - // multiple errors since they run in parallel). - p.cargo("fix --edition --allow-no-vcs -j1") - .with_stderr(&format_args!("\ -[CHECKING] foo [..] -[WARNING] `src/lib.rs` is on the latest edition, but trying to migrate to edition {next}. -Edition {next} is unstable and not allowed in this release, consider trying the nightly release channel. - -If you are trying to migrate from the previous edition ({prev}), the -process requires following these steps: - -1. Start with `edition = \"{prev}\"` in `Cargo.toml` -2. Run `cargo fix --edition` -3. Modify `Cargo.toml` to set `edition = \"{latest_stable}\"` -4. Run `cargo build` or `cargo test` to verify the fixes worked - -More details may be found at -https://doc.rust-lang.org/edition-guide/editions/transitioning-an-existing-project-to-a-new-edition.html - -[FINISHED] [..] -", next=next, latest_stable=latest_stable, prev=prev)) - .run(); - - if !is_nightly() { - // The rest of this test is fundamentally always nightly. - return; - } - - p.cargo("fix --edition --allow-no-vcs") - .masquerade_as_nightly_cargo() - .with_stderr(&format!( - "\ -[CHECKING] foo [..] -[MIGRATING] src/lib.rs from {latest_stable} edition to {next} -[FINISHED] [..] -", - latest_stable = latest_stable, - next = next, - )) - .run(); -} - -#[cargo_test] -fn prepare_for_latest_stable() { - // This is the stable counterpart of prepare_for_unstable. - let latest_stable = Edition::LATEST_STABLE; - let previous = latest_stable.previous().unwrap(); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = 'foo' - version = '0.1.0' - edition = '{}' - "#, - previous - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("fix --edition --allow-no-vcs") - .with_stderr(&format!( - "\ -[CHECKING] foo [..] -[MIGRATING] src/lib.rs from {} edition to {} -[FINISHED] [..] -", - previous, latest_stable - )) - .run(); -} - -#[cargo_test] -fn prepare_for_already_on_latest_unstable() { - // During the period where a new edition is coming up, but not yet stable, - // this test will check what happens if you are already on the latest. If - // there is no next edition, it does nothing. - if !is_nightly() { - // This test is fundamentally always nightly. 
- return; - } - let next_edition = match Edition::LATEST_UNSTABLE { - Some(next) => next, - None => { - eprintln!("Next edition is currently not available, skipping test."); - return; - } - }; - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - cargo-features = ["edition{}"] - - [package] - name = 'foo' - version = '0.1.0' - edition = '{}' - "#, - next_edition, next_edition - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("fix --edition --allow-no-vcs") - .masquerade_as_nightly_cargo() - .with_stderr_contains("[CHECKING] foo [..]") - .with_stderr_contains(&format!( - "\ -[WARNING] `src/lib.rs` is already on the latest edition ({next_edition}), unable to migrate further -", - next_edition = next_edition - )) - .run(); -} - -#[cargo_test] -fn prepare_for_already_on_latest_stable() { - // Stable counterpart of prepare_for_already_on_latest_unstable. - if Edition::LATEST_UNSTABLE.is_some() { - eprintln!("This test cannot run while the latest edition is unstable, skipping."); - return; - } - let latest_stable = Edition::LATEST_STABLE; - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = 'foo' - version = '0.1.0' - edition = '{}' - "#, - latest_stable - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("fix --edition --allow-no-vcs") - .with_stderr_contains("[CHECKING] foo [..]") - .with_stderr_contains(&format!( - "\ -[WARNING] `src/lib.rs` is already on the latest edition ({latest_stable}), unable to migrate further -", - latest_stable = latest_stable - )) - .run(); -} - -#[cargo_test] -fn fix_overlapping() { - let p = project() - .file( - "src/lib.rs", - r#" - pub fn foo() {} - pub struct A; - - pub mod bar { - pub fn baz() { - ::foo::<::A>(); - } - } - "#, - ) - .build(); - - p.cargo("fix --allow-no-vcs --edition --lib") - .with_stderr( - "\ -[CHECKING] foo [..] -[MIGRATING] src/lib.rs from 2015 edition to 2018 -[FIXED] src/lib.rs (2 fixes) -[FINISHED] dev [..] -", - ) - .run(); - - let contents = p.read_file("src/lib.rs"); - println!("{}", contents); - assert!(contents.contains("crate::foo::()")); -} - -#[cargo_test] -fn fix_idioms() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = 'foo' - version = '0.1.0' - edition = '2018' - "#, - ) - .file( - "src/lib.rs", - r#" - use std::any::Any; - pub fn foo() { - let _x: Box = Box::new(3); - } - "#, - ) - .build(); - - let stderr = "\ -[CHECKING] foo [..] -[FIXED] src/lib.rs (1 fix) -[FINISHED] [..] 
-"; - p.cargo("fix --edition-idioms --allow-no-vcs") - .with_stderr(stderr) - .run(); - - assert!(p.read_file("src/lib.rs").contains("Box")); -} - -#[cargo_test] -fn idioms_2015_ok() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("fix --edition-idioms --allow-no-vcs").run(); -} - -#[cargo_test] -fn shows_warnings_on_second_run_without_changes() { - let p = project() - .file( - "src/lib.rs", - r#" - #[deprecated] - fn bar() {} - - pub fn foo() { - let _ = bar(); - } - "#, - ) - .build(); - - p.cargo("fix --allow-no-vcs") - .with_stderr_contains("[..]warning: use of deprecated[..]") - .run(); - - p.cargo("fix --allow-no-vcs") - .with_stderr_contains("[..]warning: use of deprecated[..]") - .run(); -} - -#[cargo_test] -fn shows_warnings_on_second_run_without_changes_on_multiple_targets() { - let p = project() - .file( - "src/lib.rs", - r#" - #[deprecated] - fn bar() {} - - pub fn foo() { - let _ = bar(); - } - "#, - ) - .file( - "src/main.rs", - r#" - #[deprecated] - fn bar() {} - - fn main() { - let _ = bar(); - } - "#, - ) - .file( - "tests/foo.rs", - r#" - #[deprecated] - fn bar() {} - - #[test] - fn foo_test() { - let _ = bar(); - } - "#, - ) - .file( - "tests/bar.rs", - r#" - #[deprecated] - fn bar() {} - - #[test] - fn foo_test() { - let _ = bar(); - } - "#, - ) - .file( - "examples/fooxample.rs", - r#" - #[deprecated] - fn bar() {} - - fn main() { - let _ = bar(); - } - "#, - ) - .build(); - - p.cargo("fix --allow-no-vcs --all-targets") - .with_stderr_contains(" --> examples/fooxample.rs:6:29") - .with_stderr_contains(" --> src/lib.rs:6:29") - .with_stderr_contains(" --> src/main.rs:6:29") - .with_stderr_contains(" --> tests/bar.rs:7:29") - .with_stderr_contains(" --> tests/foo.rs:7:29") - .run(); - - p.cargo("fix --allow-no-vcs --all-targets") - .with_stderr_contains(" --> examples/fooxample.rs:6:29") - .with_stderr_contains(" --> src/lib.rs:6:29") - .with_stderr_contains(" --> src/main.rs:6:29") - .with_stderr_contains(" --> tests/bar.rs:7:29") - .with_stderr_contains(" --> tests/foo.rs:7:29") - .run(); -} - -#[cargo_test] -fn doesnt_rebuild_dependencies() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = 'bar' } - - [workspace] - "#, - ) - .file("src/lib.rs", "extern crate bar;") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("fix --allow-no-vcs -p foo") - .env("__CARGO_FIX_YOLO", "1") - .with_stdout("") - .with_stderr( - "\ -[CHECKING] bar v0.1.0 ([..]) -[CHECKING] foo v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("fix --allow-no-vcs -p foo") - .env("__CARGO_FIX_YOLO", "1") - .with_stdout("") - .with_stderr( - "\ -[CHECKING] foo v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn does_not_crash_with_rustc_wrapper() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("fix --allow-no-vcs") - .env("RUSTC_WRAPPER", tools::echo_wrapper()) - .run(); - p.build_dir().rm_rf(); - p.cargo("fix --allow-no-vcs --verbose") - .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) - .run(); -} - -#[cargo_test] -fn uses_workspace_wrapper_and_primary_wrapper_override() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("fix --allow-no-vcs --verbose") - .env("RUSTC_WORKSPACE_WRAPPER", tools::echo_wrapper()) - .with_stderr_contains("WRAPPER CALLED: rustc src/lib.rs --crate-name foo [..]") - .run(); -} - -#[cargo_test] -fn only_warn_for_relevant_crates() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - a = { path = 'a' } - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - "#, - ) - .file( - "a/src/lib.rs", - " - pub fn foo() {} - pub mod bar { - use foo; - pub fn baz() { foo() } - } - ", - ) - .build(); - - p.cargo("fix --allow-no-vcs --edition") - .with_stderr( - "\ -[CHECKING] a v0.1.0 ([..]) -[CHECKING] foo v0.1.0 ([..]) -[MIGRATING] src/lib.rs from 2015 edition to 2018 -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn fix_to_broken_code() { - let p = project() - .file( - "foo/Cargo.toml", - r#" - [package] - name = 'foo' - version = '0.1.0' - [workspace] - "#, - ) - .file( - "foo/src/main.rs", - r#" - use std::env; - use std::fs; - use std::io::Write; - use std::path::{Path, PathBuf}; - use std::process::{self, Command}; - - fn main() { - let is_lib_rs = env::args_os() - .map(PathBuf::from) - .any(|l| l == Path::new("src/lib.rs")); - if is_lib_rs { - let path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); - let path = path.join("foo"); - if path.exists() { - panic!() - } else { - fs::File::create(&path).unwrap(); - } - } - - let status = Command::new("rustc") - .args(env::args().skip(1)) - .status() - .expect("failed to run rustc"); - process::exit(status.code().unwrap_or(2)); - } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [package] - name = 'bar' - version = '0.1.0' - [workspace] - "#, - ) - .file("bar/build.rs", "fn main() {}") - .file("bar/src/lib.rs", "pub fn foo() { let mut x = 3; drop(x); }") - .build(); - - // Build our rustc shim - p.cargo("build").cwd("foo").run(); - - // Attempt to fix code, but our shim will always fail the second compile - p.cargo("fix --allow-no-vcs --broken-code") - .cwd("bar") - .env("RUSTC", p.root().join("foo/target/debug/foo")) - .with_status(101) - .with_stderr_contains("[WARNING] failed to automatically apply fixes [..]") - .run(); - - assert_eq!( - p.read_file("bar/src/lib.rs"), - "pub fn foo() { let x = 3; drop(x); }" - ); -} - -#[cargo_test] -fn fix_with_common() { - let p = project() - .file("src/lib.rs", "") - .file( - "tests/t1.rs", - "mod common; #[test] fn t1() { common::try(); }", - ) - .file( - "tests/t2.rs", - "mod common; #[test] fn t2() { common::try(); }", - ) - .file("tests/common/mod.rs", "pub fn try() {}") - .build(); - - p.cargo("fix --edition --allow-no-vcs").run(); - - assert_eq!(p.read_file("tests/common/mod.rs"), "pub fn r#try() {}"); -} - -#[cargo_test] -fn 
fix_in_existing_repo_weird_ignore() { - // Check that ignore doesn't ignore the repo itself. - let p = git::new("foo", |project| { - project - .file("src/lib.rs", "") - .file(".gitignore", "foo\ninner\n") - .file("inner/file", "") - }); - - p.cargo("fix").run(); - // This is questionable about whether it is the right behavior. It should - // probably be checking if any source file for the current project is - // ignored. - p.cargo("fix") - .cwd("inner") - .with_stderr_contains("[ERROR] no VCS found[..]") - .with_status(101) - .run(); - p.cargo("fix").cwd("src").run(); -} - -#[cargo_test] -fn fix_color_message() { - // Check that color appears in diagnostics. - let p = project() - .file("src/lib.rs", "std::compile_error!{\"color test\"}") - .build(); - - p.cargo("fix --allow-no-vcs --color=always") - .with_stderr_contains("[..]\x1b[[..]") - .with_status(101) - .run(); - - p.cargo("fix --allow-no-vcs --color=never") - .with_stderr_contains("error: color test") - .with_stderr_does_not_contain("[..]\x1b[[..]") - .with_status(101) - .run(); -} - -#[cargo_test] -fn edition_v2_resolver_report() { - // Show a report if the V2 resolver shows differences. - Package::new("common", "1.0.0") - .feature("f1", &[]) - .feature("dev-feat", &[]) - .add_dep(Dependency::new("opt_dep", "1.0").optional(true)) - .publish(); - Package::new("opt_dep", "1.0.0").publish(); - - Package::new("bar", "1.0.0") - .add_dep( - Dependency::new("common", "1.0") - .target("cfg(whatever)") - .enable_features(&["f1"]), - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - common = "1.0" - bar = "1.0" - - [build-dependencies] - common = { version = "1.0", features = ["opt_dep"] } - - [dev-dependencies] - common = { version="1.0", features=["dev-feat"] } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("fix --edition --allow-no-vcs") - .with_stderr_unordered("\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] common v1.0.0 [..] -[DOWNLOADED] bar v1.0.0 [..] -[DOWNLOADED] opt_dep v1.0.0 [..] -note: Switching to Edition 2021 will enable the use of the version 2 feature resolver in Cargo. -This may cause some dependencies to be built with fewer features enabled than previously. -More information about the resolver changes may be found at https://doc.rust-lang.org/nightly/edition-guide/rust-2021/default-cargo-resolver.html -When building the following dependencies, the given features will no longer be used: - - common v1.0.0 removed features: dev-feat, f1, opt_dep - common v1.0.0 (as host dependency) removed features: dev-feat, f1 - -The following differences only apply when building with dev-dependencies: - - common v1.0.0 removed features: f1, opt_dep - -[CHECKING] opt_dep v1.0.0 -[CHECKING] common v1.0.0 -[CHECKING] bar v1.0.0 -[CHECKING] foo v0.1.0 [..] -[MIGRATING] src/lib.rs from 2018 edition to 2021 -[FINISHED] [..] -") - .run(); -} - -#[cargo_test] -fn rustfix_handles_multi_spans() { - // Checks that rustfix handles a single diagnostic with multiple - // suggestion spans (non_fmt_panic in this case). - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file( - "src/lib.rs", - r#" - pub fn foo() { - panic!(format!("hey")); - } - "#, - ) - .build(); - - p.cargo("fix --allow-no-vcs").run(); - assert!(p.read_file("src/lib.rs").contains(r#"panic!("hey");"#)); -} - -#[cargo_test] -fn fix_edition_2021() { - // Can migrate 2021, even when lints are allowed. 
- let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - "#, - ) - .file( - "src/lib.rs", - r#" - #![allow(ellipsis_inclusive_range_patterns)] - - pub fn f() -> bool { - let x = 123; - match x { - 0...100 => true, - _ => false, - } - } - "#, - ) - .build(); - p.cargo("fix --edition --allow-no-vcs") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[CHECKING] foo v0.1.0 [..] -[MIGRATING] src/lib.rs from 2018 edition to 2021 -[FIXED] src/lib.rs (1 fix) -[FINISHED] [..] -", - ) - .run(); - assert!(p.read_file("src/lib.rs").contains(r#"0..=100 => true,"#)); -} - -#[cargo_test] -fn fix_shared_cross_workspace() { - // Fixing a file that is shared between multiple packages in the same workspace. - // Make sure two processes don't try to fix the same file at the same time. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "bar"] - "#, - ) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", "pub mod shared;") - // This will fix both unused and bare trait. - .file("foo/src/shared.rs", "pub fn fixme(x: Box<&Fn() -> ()>) {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file( - "bar/src/lib.rs", - r#" - #[path="../../foo/src/shared.rs"] - pub mod shared; - "#, - ) - .build(); - - // The output here can be either of these two, depending on who runs first: - // [FIXED] bar/src/../../foo/src/shared.rs (2 fixes) - // [FIXED] foo/src/shared.rs (2 fixes) - p.cargo("fix --allow-no-vcs") - .with_stderr_unordered( - "\ -[CHECKING] foo v0.1.0 [..] -[CHECKING] bar v0.1.0 [..] -[FIXED] [..]foo/src/shared.rs (2 fixes) -[FINISHED] [..] -", - ) - .run(); - - assert_match_exact( - "pub fn fixme(_x: Box<&dyn Fn() -> ()>) {}", - &p.read_file("foo/src/shared.rs"), - ); -} - -#[cargo_test] -fn abnormal_exit() { - // rustc fails unexpectedly after applying fixes, should show some error information. - // - // This works with a proc-macro that runs three times: - // - First run (collect diagnostics pass): writes a file, exits normally. - // - Second run (verify diagnostics work): it detects the presence of the - // file, removes the file, and aborts the process. - // - Third run (collecting messages to display): file not found, exits normally. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - pm = {path="pm"} - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn f() { - let mut x = 1; - pm::crashme!(); - } - "#, - ) - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - edition = "2018" - - [lib] - proc-macro = true - "#, - ) - .file( - "pm/src/lib.rs", - r#" - use proc_macro::TokenStream; - #[proc_macro] - pub fn crashme(_input: TokenStream) -> TokenStream { - // Use a file to succeed on the first pass, and fail on the second. 
- let p = std::env::var_os("ONCE_PATH").unwrap(); - let check_path = std::path::Path::new(&p); - if check_path.exists() { - eprintln!("I'm not a diagnostic."); - std::fs::remove_file(check_path).unwrap(); - std::process::abort(); - } else { - std::fs::write(check_path, "").unwrap(); - "".parse().unwrap() - } - } - "#, - ) - .build(); - - p.cargo("fix --lib --allow-no-vcs") - .env( - "ONCE_PATH", - paths::root().join("proc-macro-run-once").to_str().unwrap(), - ) - .with_stderr_contains( - "[WARNING] failed to automatically apply fixes suggested by rustc to crate `foo`", - ) - .with_stderr_contains("I'm not a diagnostic.") - // "signal: 6, SIGABRT: process abort signal" on some platforms - .with_stderr_contains("rustc exited abnormally: [..]") - .with_stderr_contains("Original diagnostics will follow.") - .run(); -} - -#[cargo_test] -fn fix_with_run_cargo_in_proc_macros() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [lib] - proc-macro = true - "#, - ) - .file( - "src/lib.rs", - r#" - use proc_macro::*; - - #[proc_macro] - pub fn foo(_input: TokenStream) -> TokenStream { - let output = std::process::Command::new(env!("CARGO")) - .args(&["metadata", "--format-version=1"]) - .output() - .unwrap(); - eprintln!("{}", std::str::from_utf8(&output.stderr).unwrap()); - println!("{}", std::str::from_utf8(&output.stdout).unwrap()); - "".parse().unwrap() - } - "#, - ) - .file( - "src/bin/main.rs", - r#" - use foo::foo; - - fn main() { - foo!("bar") - } - "#, - ) - .build(); - p.cargo("fix --allow-no-vcs") - .masquerade_as_nightly_cargo() - .with_stderr_does_not_contain("error: could not find .rs file in rustc args") - .run(); -} - -#[cargo_test] -fn non_edition_lint_migration() { - // Migrating to a new edition where a non-edition lint causes problems. - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file( - "src/lib.rs", - r#" - // This is only used in a test. - // To be correct, this should be gated on #[cfg(test)], but - // sometimes people don't do that. If the unused_imports - // lint removes this, then the unittest will fail to compile. - use std::str::from_utf8; - - pub mod foo { - pub const FOO: &[u8] = &[102, 111, 111]; - } - - #[test] - fn example() { - assert_eq!( - from_utf8(::foo::FOO), Ok("foo") - ); - } - "#, - ) - .build(); - // Check that it complains about an unused import. - p.cargo("check --lib") - .with_stderr_contains("[..]unused_imports[..]") - .with_stderr_contains("[..]std::str::from_utf8[..]") - .run(); - p.cargo("fix --edition --allow-no-vcs") - // Remove once --force-warn is stabilized - .masquerade_as_nightly_cargo() - .run(); - let contents = p.read_file("src/lib.rs"); - // Check it does not remove the "unused" import. - assert!(contents.contains("use std::str::from_utf8;")); - // Check that it made the edition migration. - assert!(contents.contains("from_utf8(crate::foo::FOO)")); -} - -// For rust-lang/cargo#9857 -#[cargo_test] -fn fix_in_dependency() { - Package::new("bar", "1.0.0") - .file( - "src/lib.rs", - r#" - #[macro_export] - macro_rules! 
m { - ($i:tt) => { - let $i = 1; - }; - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn foo() { - bar::m!(abc); - } - "#, - ) - .build(); - - p.cargo("fix --allow-no-vcs") - .with_stderr_does_not_contain("[FIXED] [..]") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/freshness.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/freshness.rs deleted file mode 100644 index c764bcacd..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/freshness.rs +++ /dev/null @@ -1,2661 +0,0 @@ -//! Tests for fingerprinting (rebuild detection). - -use filetime::FileTime; -use std::fs::{self, OpenOptions}; -use std::io; -use std::io::prelude::*; -use std::net::TcpListener; -use std::path::{Path, PathBuf}; -use std::process::Stdio; -use std::thread; -use std::time::SystemTime; - -use super::death; -use cargo_test_support::paths::{self, CargoPathExt}; -use cargo_test_support::registry::Package; -use cargo_test_support::{ - basic_manifest, is_coarse_mtime, project, rustc_host, rustc_host_env, sleep_ms, -}; - -#[cargo_test] -fn modifying_and_moving() { - let p = project() - .file("src/main.rs", "mod a; fn main() {}") - .file("src/a.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build").with_stdout("").run(); - p.root().move_into_the_past(); - p.root().join("target").move_into_the_past(); - - p.change_file("src/a.rs", "#[allow(unused)]fn main() {}"); - p.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - fs::rename(&p.root().join("src/a.rs"), &p.root().join("src/b.rs")).unwrap(); - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..]file not found[..]") - .run(); -} - -#[cargo_test] -fn modify_only_some_files() { - let p = project() - .file("src/lib.rs", "mod a;") - .file("src/a.rs", "") - .file("src/main.rs", "mod b; fn main() {}") - .file("src/b.rs", "") - .file("tests/test.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("test").run(); - sleep_ms(1000); - - assert!(p.bin("foo").is_file()); - - let lib = p.root().join("src/lib.rs"); - p.change_file("src/lib.rs", "invalid rust code"); - p.change_file("src/b.rs", "#[allow(unused)]fn foo() {}"); - lib.move_into_the_past(); - - // Make sure the binary is rebuilt, not the lib - p.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - assert!(p.bin("foo").is_file()); -} - -#[cargo_test] -fn rebuild_sub_package_then_while_package() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [dependencies.a] - path = "a" - [dependencies.b] - path = "b" - "#, - ) - .file("src/lib.rs", "extern crate a; extern crate b;") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - authors = [] - version = "0.0.1" - [dependencies.b] - path = "../b" - "#, - ) - .file("a/src/lib.rs", "extern crate b;") - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] b [..] -[COMPILING] a [..] -[COMPILING] foo [..] -[FINISHED] dev [..] -", - ) - .run(); - - if is_coarse_mtime() { - sleep_ms(1000); - } - p.change_file("b/src/lib.rs", "pub fn b() {}"); - - p.cargo("build -pb -v") - .with_stderr( - "\ -[COMPILING] b [..] -[RUNNING] `rustc --crate-name b [..] -[FINISHED] dev [..] -", - ) - .run(); - - p.change_file( - "src/lib.rs", - "extern crate a; extern crate b; pub fn toplevel() {}", - ); - - p.cargo("build -v") - .with_stderr( - "\ -[FRESH] b [..] -[COMPILING] a [..] -[RUNNING] `rustc --crate-name a [..] -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name foo [..] -[FINISHED] dev [..] -", - ) - .run(); -} - -#[cargo_test] -fn changing_lib_features_caches_targets() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [features] - foo = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[..]Compiling foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build --features foo") - .with_stderr( - "\ -[..]Compiling foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - /* Targets should be cached from the first build */ - - p.cargo("build") - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); - - p.cargo("build").with_stdout("").run(); - - p.cargo("build --features foo") - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); -} - -#[cargo_test] -fn changing_profiles_caches_targets() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [profile.dev] - panic = "abort" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[..]Compiling foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("test") - .with_stderr( - "\ -[..]Compiling foo v0.0.1 ([..]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target[..]debug[..]deps[..]foo-[..][EXE]) -[DOCTEST] foo -", - ) - .run(); - - /* Targets should be cached from the first build */ - - p.cargo("build") - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); - - p.cargo("test foo") - .with_stderr( - "\ -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] 
(target[..]debug[..]deps[..]foo-[..][EXE]) -", - ) - .run(); -} - -#[cargo_test] -fn changing_bin_paths_common_target_features_caches_targets() { - // Make sure dep_cache crate is built once per feature - let p = project() - .no_manifest() - .file( - ".cargo/config", - r#" - [build] - target-dir = "./target" - "#, - ) - .file( - "dep_crate/Cargo.toml", - r#" - [package] - name = "dep_crate" - version = "0.0.1" - authors = [] - - [features] - ftest = [] - "#, - ) - .file( - "dep_crate/src/lib.rs", - r#" - #[cfg(feature = "ftest")] - pub fn yo() { - println!("ftest on") - } - #[cfg(not(feature = "ftest"))] - pub fn yo() { - println!("ftest off") - } - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies] - dep_crate = {path = "../dep_crate", features = []} - "#, - ) - .file("a/src/lib.rs", "") - .file( - "a/src/main.rs", - r#" - extern crate dep_crate; - use dep_crate::yo; - fn main() { - yo(); - } - "#, - ) - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.0.1" - authors = [] - - [dependencies] - dep_crate = {path = "../dep_crate", features = ["ftest"]} - "#, - ) - .file("b/src/lib.rs", "") - .file( - "b/src/main.rs", - r#" - extern crate dep_crate; - use dep_crate::yo; - fn main() { - yo(); - } - "#, - ) - .build(); - - /* Build and rebuild a/. Ensure dep_crate only builds once */ - p.cargo("run") - .cwd("a") - .with_stdout("ftest off") - .with_stderr( - "\ -[..]Compiling dep_crate v0.0.1 ([..]) -[..]Compiling a v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]target/debug/a[EXE]` -", - ) - .run(); - p.cargo("clean -p a").cwd("a").run(); - p.cargo("run") - .cwd("a") - .with_stdout("ftest off") - .with_stderr( - "\ -[..]Compiling a v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]target/debug/a[EXE]` -", - ) - .run(); - - /* Build and rebuild b/. Ensure dep_crate only builds once */ - p.cargo("run") - .cwd("b") - .with_stdout("ftest on") - .with_stderr( - "\ -[..]Compiling dep_crate v0.0.1 ([..]) -[..]Compiling b v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]target/debug/b[EXE]` -", - ) - .run(); - p.cargo("clean -p b").cwd("b").run(); - p.cargo("run") - .cwd("b") - .with_stdout("ftest on") - .with_stderr( - "\ -[..]Compiling b v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]target/debug/b[EXE]` -", - ) - .run(); - - /* Build a/ package again. If we cache different feature dep builds correctly, - * this should not cause a rebuild of dep_crate */ - p.cargo("clean -p a").cwd("a").run(); - p.cargo("run") - .cwd("a") - .with_stdout("ftest off") - .with_stderr( - "\ -[..]Compiling a v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[..]target/debug/a[EXE]` -", - ) - .run(); - - /* Build b/ package again. If we cache different feature dep builds correctly, - * this should not cause a rebuild */ - p.cargo("clean -p b").cwd("b").run(); - p.cargo("run") - .cwd("b") - .with_stdout("ftest on") - .with_stderr( - "\ -[..]Compiling b v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-[RUNNING] `[..]target/debug/b[EXE]` -", - ) - .run(); -} - -#[cargo_test] -fn changing_bin_features_caches_targets() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [features] - foo = [] - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - let msg = if cfg!(feature = "foo") { "feature on" } else { "feature off" }; - println!("{}", msg); - } - "#, - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.rename_run("foo", "off1").with_stdout("feature off").run(); - - p.cargo("build --features foo") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.rename_run("foo", "on1").with_stdout("feature on").run(); - - /* Targets should be cached from the first build */ - - let mut e = p.cargo("build"); - // MSVC does not include hash in binary filename, so it gets recompiled. - if cfg!(target_env = "msvc") { - e.with_stderr("[COMPILING] foo[..]\n[FINISHED] dev[..]"); - } else { - e.with_stderr("[FINISHED] dev[..]"); - } - e.run(); - p.rename_run("foo", "off2").with_stdout("feature off").run(); - - let mut e = p.cargo("build --features foo"); - if cfg!(target_env = "msvc") { - e.with_stderr("[COMPILING] foo[..]\n[FINISHED] dev[..]"); - } else { - e.with_stderr("[FINISHED] dev[..]"); - } - e.run(); - p.rename_run("foo", "on2").with_stdout("feature on").run(); -} - -#[cargo_test] -fn rebuild_tests_if_lib_changes() { - let p = project() - .file("src/lib.rs", "pub fn foo() {}") - .file( - "tests/foo.rs", - r#" - extern crate foo; - #[test] - fn test() { foo::foo(); } - "#, - ) - .build(); - - p.cargo("build").run(); - p.cargo("test").run(); - - sleep_ms(1000); - p.change_file("src/lib.rs", ""); - - p.cargo("build -v").run(); - p.cargo("test -v") - .with_status(101) - .with_stderr_contains("[..]cannot find function `foo`[..]") - .run(); -} - -#[cargo_test] -fn no_rebuild_transitive_target_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a" } - [dev-dependencies] - b = { path = "b" } - "#, - ) - .file("src/lib.rs", "") - .file("tests/foo.rs", "") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [target.foo.dependencies] - c = { path = "../c" } - "#, - ) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.0.1" - authors = [] - - [dependencies] - c = { path = "../c" } - "#, - ) - .file("b/src/lib.rs", "") - .file("c/Cargo.toml", &basic_manifest("c", "0.0.1")) - .file("c/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("test --no-run") - .with_stderr( - "\ -[COMPILING] c v0.0.1 ([..]) -[COMPILING] b v0.0.1 ([..]) -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn rerun_if_changed_in_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a" } - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file( - "a/build.rs", - r#" - fn main() { - println!("cargo:rerun-if-changed=build.rs"); - } - "#, - ) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn same_build_dir_cached_packages() { - let p = project() - .no_manifest() - .file( - "a1/Cargo.toml", - r#" - [package] - name = "a1" - version = "0.0.1" - authors = [] - [dependencies] - b = { path = "../b" } - "#, - ) - .file("a1/src/lib.rs", "") - .file( - "a2/Cargo.toml", - r#" - [package] - name = "a2" - version = "0.0.1" - authors = [] - [dependencies] - b = { path = "../b" } - "#, - ) - .file("a2/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.0.1" - authors = [] - [dependencies] - c = { path = "../c" } - "#, - ) - .file("b/src/lib.rs", "") - .file( - "c/Cargo.toml", - r#" - [package] - name = "c" - version = "0.0.1" - authors = [] - [dependencies] - d = { path = "../d" } - "#, - ) - .file("c/src/lib.rs", "") - .file("d/Cargo.toml", &basic_manifest("d", "0.0.1")) - .file("d/src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - target-dir = "./target" - "#, - ) - .build(); - - p.cargo("build") - .cwd("a1") - .with_stderr(&format!( - "\ -[COMPILING] d v0.0.1 ({dir}/d) -[COMPILING] c v0.0.1 ({dir}/c) -[COMPILING] b v0.0.1 ({dir}/b) -[COMPILING] a1 v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - dir = p.url().to_file_path().unwrap().to_str().unwrap() - )) - .run(); - p.cargo("build") - .cwd("a2") - .with_stderr( - "\ -[COMPILING] a2 v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn no_rebuild_if_build_artifacts_move_backwards_in_time() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a" } - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - p.root().move_into_the_past(); - - p.cargo("build") - .with_stdout("") - .with_stderr("[FINISHED] [..]") - .run(); -} - -#[cargo_test] -fn rebuild_if_build_artifacts_move_forward_in_time() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a" } - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - p.root().move_into_the_future(); - - p.cargo("build") - .env("CARGO_LOG", "") - .with_stdout("") - .with_stderr( - "\ -[COMPILING] a v0.0.1 ([..]) -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn rebuild_if_environment_changes() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - description = "old desc" - version = "0.0.1" - authors = [] - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - println!("{}", env!("CARGO_PKG_DESCRIPTION")); - } - "#, - ) - .build(); - - p.cargo("run") - .with_stdout("old desc") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/foo[EXE]` -", - ) - .run(); - - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - description = "new desc" - version = "0.0.1" - authors = [] - "#, - ); - - p.cargo("run") - .with_stdout("new desc") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/foo[EXE]` -", - ) - .run(); -} - -#[cargo_test] -fn no_rebuild_when_rename_dir() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [workspace] - - [dependencies] - foo = { path = "foo" } - "#, - ) - .file("src/_unused.rs", "") - .file("build.rs", "fn main() {}") - .file("foo/Cargo.toml", &basic_manifest("foo", "0.0.1")) - .file("foo/src/lib.rs", "") - .file("foo/build.rs", "fn main() {}") - .build(); - - // make sure the most recently modified file is `src/lib.rs`, not - // `Cargo.toml`, to expose a historical bug where we forgot to strip the - // `Cargo.toml` path from looking for the package root. - cargo_test_support::sleep_ms(100); - fs::write(p.root().join("src/lib.rs"), "").unwrap(); - - p.cargo("build").run(); - let mut new = p.root(); - new.pop(); - new.push("bar"); - fs::rename(p.root(), &new).unwrap(); - - p.cargo("build") - .cwd(&new) - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); -} - -#[cargo_test] -fn unused_optional_dep() { - Package::new("registry1", "0.1.0").publish(); - Package::new("registry2", "0.1.0").publish(); - Package::new("registry3", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "p" - authors = [] - version = "0.1.0" - - [dependencies] - bar = { path = "bar" } - baz = { path = "baz" } - registry1 = "*" - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.1" - authors = [] - - [dev-dependencies] - registry2 = "*" - "#, - ) - .file("bar/src/lib.rs", "") - .file( - "baz/Cargo.toml", - r#" - [package] - name = "baz" - version = "0.1.1" - authors = [] - - [dependencies] - registry3 = { version = "*", optional = true } - "#, - ) - .file("baz/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn path_dev_dep_registry_updates() { - Package::new("registry1", "0.1.0").publish(); - Package::new("registry2", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "p" - authors = [] - version = "0.1.0" - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.1" - authors = [] - - [dependencies] - registry1 = "*" - - [dev-dependencies] - baz = { path = "../baz"} - "#, - ) - .file("bar/src/lib.rs", "") - .file( - "baz/Cargo.toml", - r#" - [package] - name = "baz" - version = "0.1.1" - authors = [] - - [dependencies] - registry2 = "*" - "#, - ) - .file("baz/src/lib.rs", "") - 
.build(); - - p.cargo("build").run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn change_panic_mode() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ['bar', 'baz'] - [profile.dev] - panic = 'abort' - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("bar/src/lib.rs", "") - .file( - "baz/Cargo.toml", - r#" - [package] - name = "baz" - version = "0.1.1" - authors = [] - - [lib] - proc-macro = true - - [dependencies] - bar = { path = '../bar' } - "#, - ) - .file("baz/src/lib.rs", "extern crate bar;") - .build(); - - p.cargo("build -p bar").run(); - p.cargo("build -p baz").run(); -} - -#[cargo_test] -fn dont_rebuild_based_on_plugins() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.1" - - [workspace] - members = ['baz'] - - [dependencies] - proc-macro-thing = { path = 'proc-macro-thing' } - "#, - ) - .file("src/lib.rs", "") - .file( - "proc-macro-thing/Cargo.toml", - r#" - [package] - name = "proc-macro-thing" - version = "0.1.1" - - [lib] - proc-macro = true - - [dependencies] - qux = { path = '../qux' } - "#, - ) - .file("proc-macro-thing/src/lib.rs", "") - .file( - "baz/Cargo.toml", - r#" - [package] - name = "baz" - version = "0.1.1" - - [dependencies] - qux = { path = '../qux' } - "#, - ) - .file("baz/src/main.rs", "fn main() {}") - .file("qux/Cargo.toml", &basic_manifest("qux", "0.1.1")) - .file("qux/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("build -p baz").run(); - p.cargo("build").with_stderr("[FINISHED] [..]\n").run(); - p.cargo("build -p bar") - .with_stderr("[FINISHED] [..]\n") - .run(); -} - -#[cargo_test] -fn reuse_workspace_lib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.1" - - [workspace] - - [dependencies] - baz = { path = 'baz' } - "#, - ) - .file("src/lib.rs", "") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.1")) - .file("baz/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("test -p baz -v --no-run") - .with_stderr( - "\ -[COMPILING] baz v0.1.1 ([..]) -[RUNNING] `rustc[..] --test [..]` -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn reuse_shared_build_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - shared = {path = "shared"} - - [workspace] - members = ["shared", "bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("shared/Cargo.toml", &basic_manifest("shared", "0.0.1")) - .file("shared/src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - - [build-dependencies] - shared = { path = "../shared" } - "#, - ) - .file("bar/src/lib.rs", "") - .file("bar/build.rs", "fn main() {}") - .build(); - - p.cargo("build --workspace").run(); - // This should not recompile! - p.cargo("build -p foo -v") - .with_stderr( - "\ -[FRESH] shared [..] -[FRESH] foo [..] -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn changing_rustflags_is_cached() { - let p = project().file("src/lib.rs", "").build(); - - // This isn't ever cached, we always have to recompile - for _ in 0..2 { - p.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); - p.cargo("build") - .env("RUSTFLAGS", "-C linker=cc") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); - } -} - -#[cargo_test] -fn update_dependency_mtime_does_not_rebuild() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build -Z mtime-on-use") - .masquerade_as_nightly_cargo() - .env("RUSTFLAGS", "-C linker=cc") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([..]) -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); - // This does not make new files, but it does update the mtime of the dependency. - p.cargo("build -p bar -Z mtime-on-use") - .masquerade_as_nightly_cargo() - .env("RUSTFLAGS", "-C linker=cc") - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); - // This should not recompile! - p.cargo("build -Z mtime-on-use") - .masquerade_as_nightly_cargo() - .env("RUSTFLAGS", "-C linker=cc") - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); -} - -fn fingerprint_cleaner(mut dir: PathBuf, timestamp: filetime::FileTime) { - // Cargo is experimenting with letting outside projects develop some - // limited forms of GC for target_dir. This is one of the forms. - // Specifically, Cargo is updating the mtime of a file in - // target/profile/.fingerprint each time it uses the fingerprint. - // So a cleaner can remove files associated with a fingerprint - // if all the files in the fingerprint's folder are older then a time stamp without - // effecting any builds that happened since that time stamp. 
-    let mut cleand = false;
-    dir.push(".fingerprint");
-    for fing in fs::read_dir(&dir).unwrap() {
-        let fing = fing.unwrap();
-
-        let outdated = |f: io::Result<fs::DirEntry>| {
-            filetime::FileTime::from_last_modification_time(&f.unwrap().metadata().unwrap())
-                <= timestamp
-        };
-        if fs::read_dir(fing.path()).unwrap().all(outdated) {
-            fs::remove_dir_all(fing.path()).unwrap();
-            println!("remove: {:?}", fing.path());
-            // a real cleaner would remove the big files in deps and build as well
-            // but fingerprint is sufficient for our tests
-            cleand = true;
-        } else {
-        }
-    }
-    assert!(
-        cleand,
-        "called fingerprint_cleaner, but there was nothing to remove"
-    );
-}
-
-#[cargo_test]
-fn fingerprint_cleaner_does_not_rebuild() {
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.0.1"
-
-                [dependencies]
-                bar = { path = "bar" }
-
-                [features]
-                a = []
-            "#,
-        )
-        .file("src/lib.rs", "")
-        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
-        .file("bar/src/lib.rs", "")
-        .build();
-
-    p.cargo("build -Z mtime-on-use")
-        .masquerade_as_nightly_cargo()
-        .run();
-    p.cargo("build -Z mtime-on-use --features a")
-        .masquerade_as_nightly_cargo()
-        .with_stderr(
-            "\
-[COMPILING] foo v0.0.1 ([..])
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
-        )
-        .run();
-    if is_coarse_mtime() {
-        sleep_ms(1000);
-    }
-    let timestamp = filetime::FileTime::from_system_time(SystemTime::now());
-    if is_coarse_mtime() {
-        sleep_ms(1000);
-    }
-    // This does not make new files, but it does update the mtime.
-    p.cargo("build -Z mtime-on-use --features a")
-        .masquerade_as_nightly_cargo()
-        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
-        .run();
-    fingerprint_cleaner(p.target_debug_dir(), timestamp);
-    // This should not recompile!
-    p.cargo("build -Z mtime-on-use --features a")
-        .masquerade_as_nightly_cargo()
-        .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")
-        .run();
-    // But this should be cleaned and so need a rebuild
-    p.cargo("build -Z mtime-on-use")
-        .masquerade_as_nightly_cargo()
-        .with_stderr(
-            "\
-[COMPILING] foo v0.0.1 ([..])
-[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
-        )
-        .run();
-}
-
-#[cargo_test]
-fn reuse_panic_build_dep_test() {
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.0.1"
-
-                [build-dependencies]
-                bar = { path = "bar" }
-
-                [dev-dependencies]
-                bar = { path = "bar" }
-
-                [profile.dev]
-                panic = "abort"
-            "#,
-        )
-        .file("src/lib.rs", "")
-        .file("build.rs", "fn main() {}")
-        .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
-        .file("bar/src/lib.rs", "")
-        .build();
-
-    // Check that `bar` is not built twice. It is only needed once (without `panic`).
-    p.cargo("test --lib --no-run -v")
-        .with_stderr(
-            "\
-[COMPILING] bar [..]
-[RUNNING] `rustc --crate-name bar [..]
-[COMPILING] foo [..]
-[RUNNING] `rustc --crate-name build_script_build [..]
-[RUNNING] [..]build-script-build`
-[RUNNING] `rustc --crate-name foo src/lib.rs [..]--test[..]
-[FINISHED] [..]
-", - ) - .run(); -} - -#[cargo_test] -fn reuse_panic_pm() { - // foo(panic) -> bar(panic) - // somepm(nopanic) -> bar(nopanic) - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = { path = "bar" } - somepm = { path = "somepm" } - - [profile.dev] - panic = "abort" - "#, - ) - .file("src/lib.rs", "extern crate bar;") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .file( - "somepm/Cargo.toml", - r#" - [package] - name = "somepm" - version = "0.0.1" - - [lib] - proc-macro = true - - [dependencies] - bar = { path = "../bar" } - "#, - ) - .file("somepm/src/lib.rs", "extern crate bar;") - .build(); - - // bar is built once without panic (for proc-macro) and once with (for the - // normal dependency). - p.cargo("build -v") - .with_stderr_unordered( - "\ -[COMPILING] bar [..] -[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C debuginfo=2 [..] -[RUNNING] `rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C debuginfo=2 [..] -[COMPILING] somepm [..] -[RUNNING] `rustc --crate-name somepm [..] -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C panic=abort[..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn bust_patched_dep() { - Package::new("registry1", "0.1.0").publish(); - Package::new("registry2", "0.1.0") - .dep("registry1", "0.1.0") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - registry2 = "0.1.0" - - [patch.crates-io] - registry1 = { path = "reg1new" } - "#, - ) - .file("src/lib.rs", "") - .file("reg1new/Cargo.toml", &basic_manifest("registry1", "0.1.0")) - .file("reg1new/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - if is_coarse_mtime() { - sleep_ms(1000); - } - - p.change_file("reg1new/src/lib.rs", ""); - if is_coarse_mtime() { - sleep_ms(1000); - } - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] registry1 v0.1.0 ([..]) -[COMPILING] registry2 v0.1.0 -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] -", - ) - .run(); - - p.cargo("build -v") - .with_stderr( - "\ -[FRESH] registry1 v0.1.0 ([..]) -[FRESH] registry2 v0.1.0 -[FRESH] foo v0.0.1 ([..]) -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn rebuild_on_mid_build_file_modification() { - let server = TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = server.local_addr().unwrap(); - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["root", "proc_macro_dep"] - "#, - ) - .file( - "root/Cargo.toml", - r#" - [package] - name = "root" - version = "0.1.0" - authors = [] - - [dependencies] - proc_macro_dep = { path = "../proc_macro_dep" } - "#, - ) - .file( - "root/src/lib.rs", - r#" - #[macro_use] - extern crate proc_macro_dep; - - #[derive(Noop)] - pub struct X; - "#, - ) - .file( - "proc_macro_dep/Cargo.toml", - r#" - [package] - name = "proc_macro_dep" - version = "0.1.0" - authors = [] - - [lib] - proc-macro = true - "#, - ) - .file( - "proc_macro_dep/src/lib.rs", - &format!( - r#" - extern crate proc_macro; - - use std::io::Read; - use std::net::TcpStream; - use proc_macro::TokenStream; - - #[proc_macro_derive(Noop)] - pub fn noop(_input: TokenStream) -> TokenStream {{ - let mut stream = TcpStream::connect("{}").unwrap(); - let mut v = Vec::new(); - stream.read_to_end(&mut v).unwrap(); - "".parse().unwrap() - }} - "#, - addr - ), - ) - .build(); - let root = p.root(); - - let t = thread::spawn(move || { - let socket = server.accept().unwrap().0; - sleep_ms(1000); - let mut file = OpenOptions::new() - .write(true) - .append(true) - .open(root.join("root/src/lib.rs")) - .unwrap(); - writeln!(file, "// modified").expect("Failed to append to root sources"); - drop(file); - drop(socket); - drop(server.accept().unwrap()); - }); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] proc_macro_dep v0.1.0 ([..]/proc_macro_dep) -[COMPILING] root v0.1.0 ([..]/root) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] root v0.1.0 ([..]/root) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - t.join().ok().unwrap(); -} - -#[cargo_test] -fn dirty_both_lib_and_test() { - // This tests that all artifacts that depend on the results of a build - // script will get rebuilt when the build script reruns, even for separate - // commands. It does the following: - // - // 1. Project "foo" has a build script which will compile a small - // staticlib to link against. Normally this would use the `cc` crate, - // but here we just use rustc to avoid the `cc` dependency. - // 2. Build the library. - // 3. Build the unit test. The staticlib intentionally has a bad value. - // 4. Rewrite the staticlib with the correct value. - // 5. Build the library again. - // 6. Build the unit test. This should recompile. 
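// Illustrative sketch, not part of the deleted test file: step 1 above notes
// that a real project would normally drive this through the `cc` crate rather
// than invoking rustc by hand. Assuming a hypothetical C source `slib.c` and
// `cc` listed under [build-dependencies], the equivalent build script would be
// roughly the following; `cc` emits the rustc-link-lib/rustc-link-search
// directives itself. The test below sticks with rustc to avoid the extra
// dependency.
fn main() {
    cc::Build::new().file("slib.c").compile("slib");
}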
- - let slib = |n| { - format!( - r#" - #[no_mangle] - pub extern "C" fn doit() -> i32 {{ - return {}; - }} - "#, - n - ) - }; - - let p = project() - .file( - "src/lib.rs", - r#" - extern "C" { - fn doit() -> i32; - } - - #[test] - fn t1() { - assert_eq!(unsafe { doit() }, 1, "doit assert failure"); - } - "#, - ) - .file( - "build.rs", - r#" - use std::env; - use std::path::PathBuf; - use std::process::Command; - - fn main() { - let rustc = env::var_os("RUSTC").unwrap(); - let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); - assert!( - Command::new(rustc) - .args(&[ - "--crate-type=staticlib", - "--out-dir", - out_dir.to_str().unwrap(), - "slib.rs" - ]) - .status() - .unwrap() - .success(), - "slib build failed" - ); - println!("cargo:rustc-link-lib=slib"); - println!("cargo:rustc-link-search={}", out_dir.display()); - } - "#, - ) - .file("slib.rs", &slib(2)) - .build(); - - p.cargo("build").run(); - - // 2 != 1 - p.cargo("test --lib") - .with_status(101) - .with_stdout_contains("[..]doit assert failure[..]") - .run(); - - if is_coarse_mtime() { - // #5918 - sleep_ms(1000); - } - // Fix the mistake. - p.change_file("slib.rs", &slib(1)); - - p.cargo("build").run(); - // This should recompile with the new static lib, and the test should pass. - p.cargo("test --lib").run(); -} - -#[cargo_test] -fn script_fails_stay_dirty() { - // Check if a script is aborted (such as hitting Ctrl-C) that it will re-run. - // Steps: - // 1. Build to establish fingerprints. - // 2. Make a change that triggers the build script to re-run. Abort the - // script while it is running. - // 3. Run the build again and make sure it re-runs the script. - let p = project() - .file( - "build.rs", - r#" - mod helper; - fn main() { - println!("cargo:rerun-if-changed=build.rs"); - helper::doit(); - } - "#, - ) - .file("helper.rs", "pub fn doit() {}") - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - if is_coarse_mtime() { - sleep_ms(1000); - } - p.change_file("helper.rs", r#"pub fn doit() {panic!("Crash!");}"#); - p.cargo("build") - .with_stderr_contains("[..]Crash![..]") - .with_status(101) - .run(); - // There was a bug where this second call would be "fresh". - p.cargo("build") - .with_stderr_contains("[..]Crash![..]") - .with_status(101) - .run(); -} - -#[cargo_test] -fn simulated_docker_deps_stay_cached() { - // Test what happens in docker where the nanoseconds are zeroed out. 
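// Illustrative sketch, not part of the deleted test file: the comment above
// refers to Docker-style environments where file mtimes end up with a zero
// nanosecond component. One way to emulate that for a single file is to
// truncate its mtime to whole seconds with the `filetime` crate (already used
// by this test module); the helper name here is made up.
fn truncate_mtime_to_seconds(path: &std::path::Path) -> std::io::Result<()> {
    let meta = std::fs::metadata(path)?;
    let mtime = filetime::FileTime::from_last_modification_time(&meta);
    // Keep the seconds, drop the nanoseconds, like a coarse filesystem does.
    let truncated = filetime::FileTime::from_unix_time(mtime.unix_seconds(), 0);
    filetime::set_file_mtime(path, truncated)
}
// The `zeropath` helper in the test that follows applies the same idea
// recursively to a whole directory tree.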
- Package::new("regdep", "1.0.0").publish(); - Package::new("regdep_old_style", "1.0.0") - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .publish(); - Package::new("regdep_env", "1.0.0") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rerun-if-env-changed=SOMEVAR"); - } - "#, - ) - .file("src/lib.rs", "") - .publish(); - Package::new("regdep_rerun", "1.0.0") - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rerun-if-changed=build.rs"); - } - "#, - ) - .file("src/lib.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - pathdep = { path = "pathdep" } - regdep = "1.0" - regdep_old_style = "1.0" - regdep_env = "1.0" - regdep_rerun = "1.0" - "#, - ) - .file( - "src/lib.rs", - " - extern crate pathdep; - extern crate regdep; - extern crate regdep_old_style; - extern crate regdep_env; - extern crate regdep_rerun; - ", - ) - .file("build.rs", "fn main() {}") - .file("pathdep/Cargo.toml", &basic_manifest("pathdep", "1.0.0")) - .file("pathdep/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - let already_zero = { - // This happens on HFS with 1-second timestamp resolution, - // or other filesystems where it just so happens to write exactly on a - // 1-second boundary. - let metadata = fs::metadata(p.root().join("src/lib.rs")).unwrap(); - let mtime = FileTime::from_last_modification_time(&metadata); - mtime.nanoseconds() == 0 - }; - - // Recursively remove `nanoseconds` from every path. - fn zeropath(path: &Path) { - for entry in walkdir::WalkDir::new(path) - .into_iter() - .filter_map(|e| e.ok()) - { - let metadata = fs::metadata(entry.path()).unwrap(); - let mtime = metadata.modified().unwrap(); - let mtime_duration = mtime.duration_since(SystemTime::UNIX_EPOCH).unwrap(); - let trunc_mtime = FileTime::from_unix_time(mtime_duration.as_secs() as i64, 0); - let atime = metadata.accessed().unwrap(); - let atime_duration = atime.duration_since(SystemTime::UNIX_EPOCH).unwrap(); - let trunc_atime = FileTime::from_unix_time(atime_duration.as_secs() as i64, 0); - if let Err(e) = filetime::set_file_times(entry.path(), trunc_atime, trunc_mtime) { - // Windows doesn't allow changing filetimes on some things - // (directories, other random things I'm not sure why). Just - // ignore them. - if e.kind() == std::io::ErrorKind::PermissionDenied { - println!("PermissionDenied filetime on {:?}", entry.path()); - } else { - panic!("FileTime error on {:?}: {:?}", entry.path(), e); - } - } - } - } - zeropath(&p.root()); - zeropath(&paths::home()); - - if already_zero { - println!("already zero"); - // If it was already truncated, then everything stays fresh. - p.cargo("build -v") - .with_stderr_unordered( - "\ -[FRESH] pathdep [..] -[FRESH] regdep [..] -[FRESH] regdep_env [..] -[FRESH] regdep_old_style [..] -[FRESH] regdep_rerun [..] -[FRESH] foo [..] -[FINISHED] [..] -", - ) - .run(); - } else { - println!("not already zero"); - // It is not ideal that `foo` gets recompiled, but that is the current - // behavior. Currently mtimes are ignored for registry deps. - // - // Note that this behavior is due to the fact that `foo` has a build - // script in "old" mode where it doesn't print `rerun-if-*`. In this - // mode we use `Precalculated` to fingerprint a path dependency, where - // `Precalculated` is an opaque string which has the most recent mtime - // in it. It differs between builds because one has nsec=0 and the other - // likely has a nonzero nsec. Hence, the rebuild. 
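// Illustrative sketch, not part of the deleted test file: the comment above
// describes the "Precalculated" fallback fingerprint as an opaque string that
// bakes in the newest mtime visible for a path dependency. The function below
// is a simplified model of that idea (not Cargo's implementation) that makes
// it visible why zeroing the nanoseconds changes the value and forces the
// rebuild this `else` branch expects.
fn precalculated_like_fingerprint(dir: &std::path::Path) -> std::io::Result<String> {
    let mut newest = std::time::UNIX_EPOCH;
    // Non-recursive for brevity; a real walk would cover the whole package.
    for entry in std::fs::read_dir(dir)? {
        let mtime = entry?.metadata()?.modified()?;
        if mtime > newest {
            newest = mtime;
        }
    }
    let d = newest
        .duration_since(std::time::UNIX_EPOCH)
        .unwrap_or_default();
    // Seconds *and* nanoseconds end up in the opaque string, so a tree whose
    // nanoseconds were truncated produces a different value.
    Ok(format!("{}.{:09}", d.as_secs(), d.subsec_nanos()))
}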
- p.cargo("build -v") - .with_stderr_unordered( - "\ -[FRESH] pathdep [..] -[FRESH] regdep [..] -[FRESH] regdep_env [..] -[FRESH] regdep_old_style [..] -[FRESH] regdep_rerun [..] -[COMPILING] foo [..] -[RUNNING] [..]/foo-[..]/build-script-build[..] -[RUNNING] `rustc --crate-name foo[..] -[FINISHED] [..] -", - ) - .run(); - } -} - -#[cargo_test] -fn metadata_change_invalidates() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - for attr in &[ - "authors = [\"foo\"]", - "description = \"desc\"", - "homepage = \"https://example.com\"", - "repository =\"https://example.com\"", - ] { - let mut file = OpenOptions::new() - .write(true) - .append(true) - .open(p.root().join("Cargo.toml")) - .unwrap(); - writeln!(file, "{}", attr).unwrap(); - p.cargo("build") - .with_stderr_contains("[COMPILING] foo [..]") - .run(); - } - p.cargo("build -v") - .with_stderr_contains("[FRESH] foo[..]") - .run(); - assert_eq!(p.glob("target/debug/deps/libfoo-*.rlib").count(), 1); -} - -#[cargo_test] -fn edition_change_invalidates() { - const MANIFEST: &str = r#" - [package] - name = "foo" - version = "0.1.0" - "#; - let p = project() - .file("Cargo.toml", MANIFEST) - .file("src/lib.rs", "") - .build(); - p.cargo("build").run(); - p.change_file("Cargo.toml", &format!("{}edition = \"2018\"", MANIFEST)); - p.cargo("build") - .with_stderr_contains("[COMPILING] foo [..]") - .run(); - p.change_file( - "Cargo.toml", - &format!( - r#"{}edition = "2018" - [lib] - edition = "2015" - "#, - MANIFEST - ), - ); - p.cargo("build") - .with_stderr_contains("[COMPILING] foo [..]") - .run(); - p.cargo("build -v") - .with_stderr_contains("[FRESH] foo[..]") - .run(); - assert_eq!(p.glob("target/debug/deps/libfoo-*.rlib").count(), 1); -} - -#[cargo_test] -fn rename_with_path_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = { path = 'a' } - "#, - ) - .file("src/lib.rs", "extern crate a; pub fn foo() { a::foo(); }") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - - [dependencies] - b = { path = 'b' } - "#, - ) - .file("a/src/lib.rs", "extern crate b; pub fn foo() { b::foo() }") - .file( - "a/b/Cargo.toml", - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - "#, - ) - .file("a/b/src/lib.rs", "pub fn foo() { }"); - let p = p.build(); - - p.cargo("build").run(); - - // Now rename the root directory and rerun `cargo run`. Not only should we - // not build anything but we also shouldn't crash. 
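// Illustrative sketch, not part of the deleted test file: for a root rename to
// stay cheap, cached state must not key off the absolute project path, or the
// rename performed just below would invalidate everything. The toy function
// here shows the general trick of hashing paths relative to the workspace
// root; it models the property under test, not Cargo's fingerprint code.
use std::hash::{Hash, Hasher};

fn path_key(workspace_root: &std::path::Path, file: &std::path::Path) -> u64 {
    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    // Fall back to the full path only for files outside the workspace.
    file.strip_prefix(workspace_root)
        .unwrap_or(file)
        .hash(&mut hasher);
    hasher.finish()
}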
- let mut new = p.root(); - new.pop(); - new.push("foo2"); - - fs::rename(p.root(), &new).unwrap(); - - p.cargo("build") - .cwd(&new) - .with_stderr("[FINISHED] [..]") - .run(); -} - -#[cargo_test] -fn move_target_directory_with_path_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = { path = "a" } - "#, - ) - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - "#, - ) - .file("src/lib.rs", "extern crate a; pub use a::print_msg;") - .file( - "a/build.rs", - r###" - use std::env; - use std::fs; - use std::path::Path; - - fn main() { - println!("cargo:rerun-if-changed=build.rs"); - let out_dir = env::var("OUT_DIR").unwrap(); - let dest_path = Path::new(&out_dir).join("hello.rs"); - fs::write(&dest_path, r#" - pub fn message() -> &'static str { - "Hello, World!" - } - "#).unwrap(); - } - "###, - ) - .file( - "a/src/lib.rs", - r#" - include!(concat!(env!("OUT_DIR"), "/hello.rs")); - pub fn print_msg() { message(); } - "#, - ); - let p = p.build(); - - let mut parent = p.root(); - parent.pop(); - - p.cargo("build").run(); - - let new_target = p.root().join("target2"); - fs::rename(p.root().join("target"), &new_target).unwrap(); - - p.cargo("build") - .env("CARGO_TARGET_DIR", &new_target) - .with_stderr("[FINISHED] [..]") - .run(); -} - -#[cargo_test] -fn rerun_if_changes() { - let p = project() - .file( - "build.rs", - r#" - fn main() { - println!("cargo:rerun-if-env-changed=FOO"); - if std::env::var("FOO").is_ok() { - println!("cargo:rerun-if-env-changed=BAR"); - } - } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); - - p.cargo("build -v") - .env("FOO", "1") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `[..]build-script-build` -[RUNNING] `rustc [..] -[FINISHED] [..] -", - ) - .run(); - p.cargo("build") - .env("FOO", "1") - .with_stderr("[FINISHED] [..]") - .run(); - - p.cargo("build -v") - .env("FOO", "1") - .env("BAR", "1") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `[..]build-script-build` -[RUNNING] `rustc [..] -[FINISHED] [..] -", - ) - .run(); - p.cargo("build") - .env("FOO", "1") - .env("BAR", "1") - .with_stderr("[FINISHED] [..]") - .run(); - - p.cargo("build -v") - .env("BAR", "2") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `[..]build-script-build` -[RUNNING] `rustc [..] -[FINISHED] [..] -", - ) - .run(); - p.cargo("build") - .env("BAR", "2") - .with_stderr("[FINISHED] [..]") - .run(); -} - -#[cargo_test] -fn channel_shares_filenames() { - // Test that different "nightly" releases use the same output filename. - - // Create separate rustc binaries to emulate running different toolchains. 
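// Illustrative sketch, not part of the deleted test file: the test that
// follows checks that two different nightly (or beta) snapshots reuse the same
// output filenames while two different stable releases do not. A cache key
// with that behaviour can be derived from the `release:` line of `rustc -vV`:
// keep only the channel for nightly/beta, but the full version for stable.
// This models the property being tested, not the hashing Cargo performs.
fn toolchain_key(release_line: &str) -> String {
    // e.g. "1.44.0-nightly", "1.43.0-beta.3", "1.42.0"
    if release_line.contains("-nightly") {
        "nightly".to_string()
    } else if release_line.contains("-beta") {
        "beta".to_string()
    } else {
        release_line.to_string()
    }
}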
- let nightly1 = format!( - "\ -rustc 1.44.0-nightly (38114ff16 2020-03-21) -binary: rustc -commit-hash: 38114ff16e7856f98b2b4be7ab4cd29b38bed59a -commit-date: 2020-03-21 -host: {} -release: 1.44.0-nightly -LLVM version: 9.0 -", - rustc_host() - ); - - let nightly2 = format!( - "\ -rustc 1.44.0-nightly (a5b09d354 2020-03-31) -binary: rustc -commit-hash: a5b09d35473615e7142f5570f5c5fad0caf68bd2 -commit-date: 2020-03-31 -host: {} -release: 1.44.0-nightly -LLVM version: 9.0 -", - rustc_host() - ); - - let beta1 = format!( - "\ -rustc 1.43.0-beta.3 (4c587bbda 2020-03-25) -binary: rustc -commit-hash: 4c587bbda04ab55aaf56feab11dfdfe387a85d7a -commit-date: 2020-03-25 -host: {} -release: 1.43.0-beta.3 -LLVM version: 9.0 -", - rustc_host() - ); - - let beta2 = format!( - "\ -rustc 1.42.0-beta.5 (4e1c5f0e9 2020-02-28) -binary: rustc -commit-hash: 4e1c5f0e9769a588b91c977e3d81e140209ef3a2 -commit-date: 2020-02-28 -host: {} -release: 1.42.0-beta.5 -LLVM version: 9.0 -", - rustc_host() - ); - - let stable1 = format!( - "\ -rustc 1.42.0 (b8cedc004 2020-03-09) -binary: rustc -commit-hash: b8cedc00407a4c56a3bda1ed605c6fc166655447 -commit-date: 2020-03-09 -host: {} -release: 1.42.0 -LLVM version: 9.0 -", - rustc_host() - ); - - let stable2 = format!( - "\ -rustc 1.41.1 (f3e1a954d 2020-02-24) -binary: rustc -commit-hash: f3e1a954d2ead4e2fc197c7da7d71e6c61bad196 -commit-date: 2020-02-24 -host: {} -release: 1.41.1 -LLVM version: 9.0 -", - rustc_host() - ); - - let compiler = project() - .at("compiler") - .file("Cargo.toml", &basic_manifest("compiler", "0.1.0")) - .file( - "src/main.rs", - r#" - fn main() { - if std::env::args_os().any(|a| a == "-vV") { - print!("{}", env!("FUNKY_VERSION_TEST")); - return; - } - let mut cmd = std::process::Command::new("rustc"); - cmd.args(std::env::args_os().skip(1)); - assert!(cmd.status().unwrap().success()); - } - "#, - ) - .build(); - - let makeit = |version, vv| { - // Force a rebuild. - compiler.target_debug_dir().join("deps").rm_rf(); - compiler.cargo("build").env("FUNKY_VERSION_TEST", vv).run(); - fs::rename(compiler.bin("compiler"), compiler.bin(version)).unwrap(); - }; - makeit("nightly1", nightly1); - makeit("nightly2", nightly2); - makeit("beta1", beta1); - makeit("beta2", beta2); - makeit("stable1", stable1); - makeit("stable2", stable2); - - // Run `cargo check` with different rustc versions to observe its behavior. - let p = project().file("src/lib.rs", "").build(); - - // Runs `cargo check` and returns the rmeta filename created. - // Checks that the freshness matches the given value. - let check = |version, fresh| -> String { - let output = p - .cargo("check --message-format=json") - .env("RUSTC", compiler.bin(version)) - .exec_with_output() - .unwrap(); - // Collect the filenames generated. - let mut artifacts: Vec<_> = std::str::from_utf8(&output.stdout) - .unwrap() - .lines() - .filter_map(|line| { - let value: serde_json::Value = serde_json::from_str(line).unwrap(); - if value["reason"].as_str().unwrap() == "compiler-artifact" { - assert_eq!(value["fresh"].as_bool().unwrap(), fresh); - let filenames = value["filenames"].as_array().unwrap(); - assert_eq!(filenames.len(), 1); - Some(filenames[0].to_string()) - } else { - None - } - }) - .collect(); - // Should only generate one rmeta file. 
- assert_eq!(artifacts.len(), 1); - artifacts.pop().unwrap() - }; - - let nightly1_name = check("nightly1", false); - assert_eq!(check("nightly1", true), nightly1_name); - assert_eq!(check("nightly2", false), nightly1_name); // same as before - assert_eq!(check("nightly2", true), nightly1_name); - // Should rebuild going back to nightly1. - assert_eq!(check("nightly1", false), nightly1_name); - - let beta1_name = check("beta1", false); - assert_ne!(beta1_name, nightly1_name); - assert_eq!(check("beta1", true), beta1_name); - assert_eq!(check("beta2", false), beta1_name); // same as before - assert_eq!(check("beta2", true), beta1_name); - // Should rebuild going back to beta1. - assert_eq!(check("beta1", false), beta1_name); - - let stable1_name = check("stable1", false); - assert_ne!(stable1_name, nightly1_name); - assert_ne!(stable1_name, beta1_name); - let stable2_name = check("stable2", false); - assert_ne!(stable1_name, stable2_name); - // Check everything is fresh. - assert_eq!(check("stable1", true), stable1_name); - assert_eq!(check("stable2", true), stable2_name); - assert_eq!(check("beta1", true), beta1_name); - assert_eq!(check("nightly1", true), nightly1_name); -} - -#[cargo_test] -fn linking_interrupted() { - // Interrupt during the linking phase shouldn't leave test executable as "fresh". - - // This is used to detect when linking starts, then to pause the linker so - // that the test can kill cargo. - let link_listener = TcpListener::bind("127.0.0.1:0").unwrap(); - let link_addr = link_listener.local_addr().unwrap(); - - // This is used to detect when rustc exits. - let rustc_listener = TcpListener::bind("127.0.0.1:0").unwrap(); - let rustc_addr = rustc_listener.local_addr().unwrap(); - - // Create a linker that we can interrupt. - let linker = project() - .at("linker") - .file("Cargo.toml", &basic_manifest("linker", "1.0.0")) - .file( - "src/main.rs", - &r#" - fn main() { - // Figure out the output filename. - let output = match std::env::args().find(|a| a.starts_with("/OUT:")) { - Some(s) => s[5..].to_string(), - None => { - let mut args = std::env::args(); - loop { - if args.next().unwrap() == "-o" { - break; - } - } - args.next().unwrap() - } - }; - std::fs::remove_file(&output).unwrap(); - std::fs::write(&output, "").unwrap(); - // Tell the test that we are ready to be interrupted. - let mut socket = std::net::TcpStream::connect("__ADDR__").unwrap(); - // Wait for the test to kill us. - std::thread::sleep(std::time::Duration::new(60, 0)); - } - "# - .replace("__ADDR__", &link_addr.to_string()), - ) - .build(); - linker.cargo("build").run(); - - // Create a wrapper around rustc that will tell us when rustc is finished. - let rustc = project() - .at("rustc-waiter") - .file("Cargo.toml", &basic_manifest("rustc-waiter", "1.0.0")) - .file( - "src/main.rs", - &r#" - fn main() { - let mut conn = None; - // Check for a normal build (not -vV or --print). - if std::env::args().any(|arg| arg == "t1") { - // Tell the test that rustc has started. - conn = Some(std::net::TcpStream::connect("__ADDR__").unwrap()); - } - let status = std::process::Command::new("rustc") - .args(std::env::args().skip(1)) - .status() - .expect("rustc to run"); - std::process::exit(status.code().unwrap_or(1)); - } - "# - .replace("__ADDR__", &rustc_addr.to_string()), - ) - .build(); - rustc.cargo("build").run(); - - // Build it once so that the fingerprint gets saved to disk. 
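// Illustrative sketch, not part of the deleted test file: the scenario below
// interrupts cargo while the fake linker is still running and then checks that
// the test binary is not left marked as fresh. The ordering that guarantees
// this is to persist the new fingerprint only after the link step has
// succeeded, as in this sketch with made-up helper and path names.
fn link_then_record(
    fingerprint_path: &std::path::Path,
    new_fingerprint: &str,
    link: &mut std::process::Command,
) -> std::io::Result<()> {
    let status = link.status()?;
    if !status.success() {
        // Leave the old fingerprint in place so the next build redoes the link.
        return Err(std::io::Error::new(
            std::io::ErrorKind::Other,
            "link failed",
        ));
    }
    // Only now is the unit safe to mark as up to date.
    std::fs::write(fingerprint_path, new_fingerprint)
}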
- let p = project() - .file("src/lib.rs", "") - .file("tests/t1.rs", "") - .build(); - p.cargo("test --test t1 --no-run").run(); - - // Make a change, start a build, then interrupt it. - p.change_file("src/lib.rs", "// modified"); - let linker_env = format!("CARGO_TARGET_{}_LINKER", rustc_host_env()); - // NOTE: This assumes that the paths to the linker or rustc are not in the - // fingerprint. But maybe they should be? - let mut cmd = p - .cargo("test --test t1 --no-run") - .env(&linker_env, linker.bin("linker")) - .env("RUSTC", rustc.bin("rustc-waiter")) - .build_command(); - let mut child = cmd - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .env("__CARGO_TEST_SETSID_PLEASE_DONT_USE_ELSEWHERE", "1") - .spawn() - .unwrap(); - // Wait for rustc to start. - let mut rustc_conn = rustc_listener.accept().unwrap().0; - // Wait for linking to start. - drop(link_listener.accept().unwrap()); - - // Interrupt the child. - death::ctrl_c(&mut child); - assert!(!child.wait().unwrap().success()); - // Wait for rustc to exit. If we don't wait, then the command below could - // start while rustc is still being torn down. - let mut buf = [0]; - drop(rustc_conn.read_exact(&mut buf)); - - // Build again, shouldn't be fresh. - p.cargo("test --test t1") - .with_stderr( - "\ -[COMPILING] foo [..] -[FINISHED] [..] -[RUNNING] tests/t1.rs (target/debug/deps/t1[..]) -", - ) - .run(); -} - -#[cargo_test] -#[cfg_attr( - not(all(target_arch = "x86_64", target_os = "windows", target_env = "msvc")), - ignore -)] -fn lld_is_fresh() { - // Check for bug when using lld linker that it remains fresh with dylib. - let p = project() - .file( - ".cargo/config", - r#" - [target.x86_64-pc-windows-msvc] - linker = "rust-lld" - rustflags = ["-C", "link-arg=-fuse-ld=lld"] - "#, - ) - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [lib] - crate-type = ["dylib"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("build -v") - .with_stderr("[FRESH] foo [..]\n[FINISHED] [..]") - .run(); -} - -#[cargo_test] -fn env_in_code_causes_rebuild() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - println!("{:?}", option_env!("FOO")); - println!("{:?}", option_env!("FOO\nBAR")); - } - "#, - ) - .build(); - - p.cargo("build").env_remove("FOO").run(); - p.cargo("build") - .env_remove("FOO") - .with_stderr("[FINISHED] [..]") - .run(); - p.cargo("build") - .env("FOO", "bar") - .with_stderr("[COMPILING][..]\n[FINISHED][..]") - .run(); - p.cargo("build") - .env("FOO", "bar") - .with_stderr("[FINISHED][..]") - .run(); - p.cargo("build") - .env("FOO", "baz") - .with_stderr("[COMPILING][..]\n[FINISHED][..]") - .run(); - p.cargo("build") - .env("FOO", "baz") - .with_stderr("[FINISHED][..]") - .run(); - p.cargo("build") - .env_remove("FOO") - .with_stderr("[COMPILING][..]\n[FINISHED][..]") - .run(); - p.cargo("build") - .env_remove("FOO") - .with_stderr("[FINISHED][..]") - .run(); - - let interesting = " #!$\nabc\r\\\t\u{8}\r\n"; - p.cargo("build").env("FOO", interesting).run(); - p.cargo("build") - .env("FOO", interesting) - .with_stderr("[FINISHED][..]") - .run(); - - p.cargo("build").env("FOO\nBAR", interesting).run(); - p.cargo("build") - .env("FOO\nBAR", interesting) - .with_stderr("[FINISHED][..]") - .run(); -} - -#[cargo_test] -fn env_build_script_no_rebuild() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - "#, - ) - 
.file( - "build.rs", - r#" - fn main() { - println!("cargo:rustc-env=FOO=bar"); - } - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - println!("{:?}", env!("FOO")); - } - "#, - ) - .build(); - - p.cargo("build").run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn cargo_env_changes() { - // Checks that changes to the env var CARGO in the dep-info file triggers - // a rebuild. - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file( - "src/main.rs", - r#" - fn main() { - println!("{:?}", env!("CARGO")); - } - "#, - ) - .build(); - - let cargo_exe = cargo_test_support::cargo_exe(); - let other_cargo_path = p.root().join(cargo_exe.file_name().unwrap()); - std::fs::hard_link(&cargo_exe, &other_cargo_path).unwrap(); - let other_cargo = || { - let mut pb = cargo_test_support::process(&other_cargo_path); - pb.cwd(p.root()); - cargo_test_support::execs().with_process_builder(pb) - }; - - p.cargo("check").run(); - other_cargo() - .arg("check") - .arg("-v") - .with_stderr( - "\ -[CHECKING] foo [..] -[RUNNING] `rustc [..] -[FINISHED] [..] -", - ) - .run(); - - // And just to confirm that without using env! it doesn't rebuild. - p.change_file("src/main.rs", "fn main() {}"); - p.cargo("check") - .with_stderr( - "\ -[CHECKING] foo [..] -[FINISHED] [..] -", - ) - .run(); - other_cargo() - .arg("check") - .arg("-v") - .with_stderr( - "\ -[FRESH] foo [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn changing_linker() { - // Changing linker should rebuild. - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("build").run(); - let linker_env = format!("CARGO_TARGET_{}_LINKER", rustc_host_env()); - p.cargo("build --verbose") - .env(&linker_env, "nonexistent-linker") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] -C linker=nonexistent-linker [..]` -[ERROR] [..]linker[..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/future_incompat_report.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/future_incompat_report.rs deleted file mode 100644 index c6cb2ffd3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/future_incompat_report.rs +++ /dev/null @@ -1,410 +0,0 @@ -//! Tests for future-incompat-report messages -//! -//! Note that these tests use the -Zfuture-incompat-test for rustc. -//! This causes rustc to treat *every* lint as future-incompatible. -//! This is done because future-incompatible lints are inherently -//! ephemeral, but we don't want to continually update these tests. -//! So we pick some random lint that will likely always be the same -//! over time. - -use super::config::write_config_toml; -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_manifest, is_nightly, project, Project}; - -// An arbitrary lint (unused_variables) that triggers a lint. -// We use a special flag to force it to generate a report. -const FUTURE_EXAMPLE: &'static str = "fn main() { let x = 1; }"; -// Some text that will be displayed when the lint fires. 
-const FUTURE_OUTPUT: &'static str = "[..]unused_variables[..]"; - -fn simple_project() -> Project { - project() - .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) - .file("src/main.rs", FUTURE_EXAMPLE) - .build() -} - -#[cargo_test] -fn output_on_stable() { - if !is_nightly() { - // -Zfuture-incompat-test requires nightly (permanently) - return; - } - let p = simple_project(); - - p.cargo("check") - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .with_stderr_contains(FUTURE_OUTPUT) - .with_stderr_contains("[..]cargo report[..]") - .run(); -} - -// This feature is stable, and should not be gated -#[cargo_test] -fn no_gate_future_incompat_report() { - let p = simple_project(); - - p.cargo("build --future-incompat-report") - .with_status(0) - .run(); - - p.cargo("report future-incompatibilities --id foo") - .with_stderr_contains("error: no reports are currently available") - .with_status(101) - .run(); -} - -#[cargo_test] -fn test_zero_future_incompat() { - if !is_nightly() { - // -Zfuture-incompat-test requires nightly (permanently) - return; - } - - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "0.0.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - // No note if --future-incompat-report is not specified. - p.cargo("build") - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .with_stderr( - "\ -[COMPILING] foo v0.0.0 [..] -[FINISHED] [..] -", - ) - .run(); - - p.cargo("build --future-incompat-report") - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .with_stderr( - "\ -[FINISHED] [..] -note: 0 dependencies had future-incompatible warnings -", - ) - .run(); -} - -#[cargo_test] -fn test_single_crate() { - if !is_nightly() { - // -Zfuture-incompat-test requires nightly (permanently) - return; - } - - let p = simple_project(); - - for command in &["build", "check", "rustc", "test"] { - let check_has_future_compat = || { - p.cargo(command) - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .with_stderr_contains(FUTURE_OUTPUT) - .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: foo v0.0.0 [..]") - .with_stderr_does_not_contain("[..]incompatibility[..]") - .run(); - }; - - // Check that we show a message with no [future-incompat-report] config section - write_config_toml(""); - check_has_future_compat(); - - // Check that we show a message with `frequence = "always"` - write_config_toml( - "\ -[future-incompat-report] -frequency = 'always' -", - ); - check_has_future_compat(); - - // Check that we do not show a message with `frequency = "never"` - write_config_toml( - "\ -[future-incompat-report] -frequency = 'never' -", - ); - p.cargo(command) - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .with_stderr_contains(FUTURE_OUTPUT) - .with_stderr_does_not_contain("[..]rejected[..]") - .with_stderr_does_not_contain("[..]incompatibility[..]") - .run(); - - // Check that passing `--future-incompat-report` overrides `frequency = 'never'` - p.cargo(command).arg("--future-incompat-report") - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .with_stderr_contains(FUTURE_OUTPUT) - .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: foo v0.0.0 [..]") - .with_stderr_contains(" - foo:0.0.0[..]") - .run(); - } -} - -#[cargo_test] -fn test_multi_crate() { - if !is_nightly() { - // -Zfuture-incompat-test requires nightly (permanently) - return; - } - - Package::new("first-dep", "0.0.1") - .file("src/lib.rs", FUTURE_EXAMPLE) - .publish(); - 
Package::new("second-dep", "0.0.2") - .file("src/lib.rs", FUTURE_EXAMPLE) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - - [dependencies] - first-dep = "*" - second-dep = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - for command in &["build", "check", "rustc", "test"] { - p.cargo(command) - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .with_stderr_does_not_contain(FUTURE_OUTPUT) - .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: first-dep v0.0.1, second-dep v0.0.2") - // Check that we don't have the 'triggers' message shown at the bottom of this loop, - // and that we don't explain how to show a per-package report - .with_stderr_does_not_contain("[..]triggers[..]") - .with_stderr_does_not_contain("[..]--package[..]") - .with_stderr_does_not_contain("[..]-p[..]") - .run(); - - p.cargo(command).arg("--future-incompat-report") - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .with_stderr_contains("warning: the following packages contain code that will be rejected by a future version of Rust: first-dep v0.0.1, second-dep v0.0.2") - .with_stderr_contains(" - first-dep:0.0.1") - .with_stderr_contains(" - second-dep:0.0.2") - .run(); - - p.cargo("report future-incompatibilities").arg("--package").arg("first-dep:0.0.1") - .with_stdout_contains("The package `first-dep v0.0.1` currently triggers the following future incompatibility lints:") - .with_stdout_contains(FUTURE_OUTPUT) - .with_stdout_does_not_contain("[..]second-dep-0.0.2/src[..]") - .run(); - - p.cargo("report future-incompatibilities").arg("--package").arg("second-dep:0.0.2") - .with_stdout_contains("The package `second-dep v0.0.2` currently triggers the following future incompatibility lints:") - .with_stdout_contains(FUTURE_OUTPUT) - .with_stdout_does_not_contain("[..]first-dep-0.0.1/src[..]") - .run(); - } - - // Test that passing the correct id via '--id' doesn't generate a warning message - let output = p - .cargo("build") - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .exec_with_output() - .unwrap(); - - // Extract the 'id' from the stdout. We are looking - // for the id in a line of the form "run `cargo report future-incompatibilities --id yZ7S`" - // which is generated by Cargo to tell the user what command to run - // This is just to test that passing the id suppresses the warning mesasge. Any users needing - // access to the report from a shell script should use the `--future-incompat-report` flag - let stderr = std::str::from_utf8(&output.stderr).unwrap(); - - // Find '--id ' in the output - let mut iter = stderr.split(' '); - iter.find(|w| *w == "--id").unwrap(); - let id = iter - .next() - .unwrap_or_else(|| panic!("Unexpected output:\n{}", stderr)); - // Strip off the trailing '`' included in the output - let id: String = id.chars().take_while(|c| *c != '`').collect(); - - p.cargo(&format!("report future-incompatibilities --id {}", id)) - .with_stdout_contains("The package `first-dep v0.0.1` currently triggers the following future incompatibility lints:") - .with_stdout_contains("The package `second-dep v0.0.2` currently triggers the following future incompatibility lints:") - .run(); - - // Test without --id, and also the full output of the report. 
- let output = p - .cargo("report future-incompat") - .exec_with_output() - .unwrap(); - let output = std::str::from_utf8(&output.stdout).unwrap(); - assert!(output.starts_with("The following warnings were discovered")); - let mut lines = output - .lines() - // Skip the beginning of the per-package information. - .skip_while(|line| !line.starts_with("The package")); - for expected in &["first-dep v0.0.1", "second-dep v0.0.2"] { - assert_eq!( - &format!( - "The package `{}` currently triggers the following future incompatibility lints:", - expected - ), - lines.next().unwrap(), - "Bad output:\n{}", - output - ); - let mut count = 0; - while let Some(line) = lines.next() { - if line.is_empty() { - break; - } - count += 1; - } - assert!(count > 0); - } - assert_eq!(lines.next(), None); -} - -#[cargo_test] -fn color() { - if !is_nightly() { - // -Zfuture-incompat-test requires nightly (permanently) - return; - } - - let p = simple_project(); - - p.cargo("check") - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .masquerade_as_nightly_cargo() - .run(); - - p.cargo("report future-incompatibilities") - .masquerade_as_nightly_cargo() - .with_stdout_does_not_contain("[..]\x1b[[..]") - .run(); - - p.cargo("report future-incompatibilities") - .masquerade_as_nightly_cargo() - .env("CARGO_TERM_COLOR", "always") - .with_stdout_contains("[..]\x1b[[..]") - .run(); -} - -#[cargo_test] -fn bad_ids() { - if !is_nightly() { - // -Zfuture-incompat-test requires nightly (permanently) - return; - } - - let p = simple_project(); - - p.cargo("report future-incompatibilities --id 1") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr("error: no reports are currently available") - .run(); - - p.cargo("check") - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .masquerade_as_nightly_cargo() - .run(); - - p.cargo("report future-incompatibilities --id foo") - .masquerade_as_nightly_cargo() - .with_status(1) - .with_stderr("error: Invalid value: could not parse `foo` as a number") - .run(); - - p.cargo("report future-incompatibilities --id 7") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: could not find report with ID 7 -Available IDs are: 1 -", - ) - .run(); -} - -#[cargo_test] -fn suggestions_for_updates() { - if !is_nightly() { - // -Zfuture-incompat-test requires nightly (permanently) - return; - } - - Package::new("with_updates", "1.0.0") - .file("src/lib.rs", FUTURE_EXAMPLE) - .publish(); - Package::new("big_update", "1.0.0") - .file("src/lib.rs", FUTURE_EXAMPLE) - .publish(); - Package::new("without_updates", "1.0.0") - .file("src/lib.rs", FUTURE_EXAMPLE) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - with_updates = "1" - big_update = "1" - without_updates = "1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("generate-lockfile").run(); - - Package::new("with_updates", "1.0.1") - .file("src/lib.rs", "") - .publish(); - Package::new("with_updates", "1.0.2") - .file("src/lib.rs", "") - .publish(); - Package::new("with_updates", "3.0.1") - .file("src/lib.rs", "") - .publish(); - Package::new("big_update", "2.0.0") - .file("src/lib.rs", "") - .publish(); - - // This is a hack to force cargo to update the index. Cargo can't do this - // automatically because doing a network update on every build would be a - // bad idea. 
Under normal circumstances, we'll hope the user has done - // something else along the way to trigger an update (building some other - // project or something). This could use some more consideration of how to - // handle this better (maybe only trigger an update if it hasn't updated - // in a long while?). - p.cargo("update -p without_updates").run(); - - let update_message = "\ -- Some affected dependencies have newer versions available. -You may want to consider updating them to a newer version to see if the issue has been fixed. - -big_update v1.0.0 has the following newer versions available: 2.0.0 -with_updates v1.0.0 has the following newer versions available: 1.0.1, 1.0.2, 3.0.1 -"; - - p.cargo("check --future-incompat-report") - .masquerade_as_nightly_cargo() - .env("RUSTFLAGS", "-Zfuture-incompat-test") - .with_stderr_contains(update_message) - .run(); - - p.cargo("report future-incompatibilities") - .masquerade_as_nightly_cargo() - .with_stdout_contains(update_message) - .run() -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/generate_lockfile.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/generate_lockfile.rs deleted file mode 100644 index 30218bc7e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/generate_lockfile.rs +++ /dev/null @@ -1,220 +0,0 @@ -//! Tests for the `cargo generate-lockfile` command. - -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_manifest, paths, project, ProjectBuilder}; -use std::fs; - -#[cargo_test] -fn adding_and_removing_packages() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("generate-lockfile").run(); - - let lock1 = p.read_lockfile(); - - // add a dep - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [dependencies.bar] - path = "bar" - "#, - ); - p.cargo("generate-lockfile").run(); - let lock2 = p.read_lockfile(); - assert_ne!(lock1, lock2); - - // change the dep - p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.0.2")); - p.cargo("generate-lockfile").run(); - let lock3 = p.read_lockfile(); - assert_ne!(lock1, lock3); - assert_ne!(lock2, lock3); - - // remove the dep - println!("lock4"); - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - "#, - ); - p.cargo("generate-lockfile").run(); - let lock4 = p.read_lockfile(); - assert_eq!(lock1, lock4); -} - -#[cargo_test] -fn no_index_update() { - Package::new("serde", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [dependencies] - serde = "1.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("generate-lockfile") - .with_stderr("[UPDATING] `[..]` index") - .run(); - - p.cargo("generate-lockfile -Zno-index-update") - .masquerade_as_nightly_cargo() - .with_stdout("") - .with_stderr("") - .run(); -} - -#[cargo_test] -fn preserve_metadata() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("generate-lockfile").run(); - - let metadata = r#" -[metadata] -bar = "baz" -foo = "bar" -"#; - let lock = p.read_lockfile(); - let data = lock + metadata; - p.change_file("Cargo.lock", &data); - - // Build and make sure the metadata is still there - 
p.cargo("build").run(); - let lock = p.read_lockfile(); - assert!(lock.contains(metadata.trim()), "{}", lock); - - // Update and make sure the metadata is still there - p.cargo("update").run(); - let lock = p.read_lockfile(); - assert!(lock.contains(metadata.trim()), "{}", lock); -} - -#[cargo_test] -fn preserve_line_endings_issue_2076() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - let lockfile = p.root().join("Cargo.lock"); - p.cargo("generate-lockfile").run(); - assert!(lockfile.is_file()); - p.cargo("generate-lockfile").run(); - - let lock0 = p.read_lockfile(); - - assert!(lock0.starts_with("# This file is automatically @generated by Cargo.\n# It is not intended for manual editing.\n")); - - let lock1 = lock0.replace("\n", "\r\n"); - p.change_file("Cargo.lock", &lock1); - - p.cargo("generate-lockfile").run(); - - let lock2 = p.read_lockfile(); - - assert!(lock2.starts_with("# This file is automatically @generated by Cargo.\r\n# It is not intended for manual editing.\r\n")); - assert_eq!(lock1, lock2); -} - -#[cargo_test] -fn cargo_update_generate_lockfile() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - let lockfile = p.root().join("Cargo.lock"); - assert!(!lockfile.is_file()); - p.cargo("update").with_stdout("").run(); - assert!(lockfile.is_file()); - - fs::remove_file(p.root().join("Cargo.lock")).unwrap(); - - assert!(!lockfile.is_file()); - p.cargo("update").with_stdout("").run(); - assert!(lockfile.is_file()); -} - -#[cargo_test] -fn duplicate_entries_in_lockfile() { - let _a = ProjectBuilder::new(paths::root().join("a")) - .file( - "Cargo.toml", - r#" - [package] - name = "a" - authors = [] - version = "0.0.1" - - [dependencies] - common = {path="common"} - "#, - ) - .file("src/lib.rs", "") - .build(); - - let common_toml = &basic_manifest("common", "0.0.1"); - - let _common_in_a = ProjectBuilder::new(paths::root().join("a/common")) - .file("Cargo.toml", common_toml) - .file("src/lib.rs", "") - .build(); - - let b = ProjectBuilder::new(paths::root().join("b")) - .file( - "Cargo.toml", - r#" - [package] - name = "b" - authors = [] - version = "0.0.1" - - [dependencies] - common = {path="common"} - a = {path="../a"} - "#, - ) - .file("src/lib.rs", "") - .build(); - - let _common_in_b = ProjectBuilder::new(paths::root().join("b/common")) - .file("Cargo.toml", common_toml) - .file("src/lib.rs", "") - .build(); - - // should fail due to a duplicate package `common` in the lock file - b.cargo("build") - .with_status(101) - .with_stderr_contains( - "[..]package collision in the lockfile: packages common [..] and \ - common [..] are different, but only one can be written to \ - lockfile unambiguously", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/git.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/git.rs deleted file mode 100644 index 293c5db18..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/git.rs +++ /dev/null @@ -1,3386 +0,0 @@ -//! Tests for git support. 
- -use std::env; -use std::fs; -use std::io::prelude::*; -use std::net::{TcpListener, TcpStream}; -use std::path::Path; -use std::str; -use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::Arc; -use std::thread; - -use cargo_test_support::paths::{self, CargoPathExt}; -use cargo_test_support::{basic_lib_manifest, basic_manifest, git, main_file, path2url, project}; -use cargo_test_support::{sleep_ms, t, Project}; - -fn disable_git_cli() -> bool { - // mingw git on Windows does not support Windows-style file URIs. - // Appveyor in the rust repo has that git up front in the PATH instead - // of Git-for-Windows, which causes this to fail. - env::var("CARGO_TEST_DISABLE_GIT_CLI") == Ok("1".to_string()) -} - -#[cargo_test] -fn cargo_compile_simple_git_dep() { - let project = project(); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file( - "src/dep1.rs", - r#" - pub fn hello() -> &'static str { - "hello world" - } - "#, - ) - }); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - git = '{}' - "#, - git_project.url() - ), - ) - .file( - "src/main.rs", - &main_file(r#""{}", dep1::hello()"#, &["dep1"]), - ) - .build(); - - let git_root = git_project.root(); - - project - .cargo("build") - .with_stderr(&format!( - "[UPDATING] git repository `{}`\n\ - [COMPILING] dep1 v0.5.0 ({}#[..])\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - path2url(&git_root), - path2url(&git_root), - )) - .run(); - - assert!(project.bin("foo").is_file()); - - project - .process(&project.bin("foo")) - .with_stdout("hello world\n") - .run(); -} - -#[cargo_test] -fn cargo_compile_git_dep_branch() { - let project = project(); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file( - "src/dep1.rs", - r#" - pub fn hello() -> &'static str { - "hello world" - } - "#, - ) - }); - - // Make a new branch based on the current HEAD commit - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let head = repo.head().unwrap().target().unwrap(); - let head = repo.find_commit(head).unwrap(); - repo.branch("branchy", &head, true).unwrap(); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - git = '{}' - branch = "branchy" - - "#, - git_project.url() - ), - ) - .file( - "src/main.rs", - &main_file(r#""{}", dep1::hello()"#, &["dep1"]), - ) - .build(); - - let git_root = git_project.root(); - - project - .cargo("build") - .with_stderr(&format!( - "[UPDATING] git repository `{}`\n\ - [COMPILING] dep1 v0.5.0 ({}?branch=branchy#[..])\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - path2url(&git_root), - path2url(&git_root), - )) - .run(); - - assert!(project.bin("foo").is_file()); - - project - .process(&project.bin("foo")) - .with_stdout("hello world\n") - .run(); -} - -#[cargo_test] -fn cargo_compile_git_dep_tag() { - let project = project(); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file( - "src/dep1.rs", - r#" - pub fn hello() -> &'static str { - "hello world" - } - "#, - ) - }); - - // Make a tag corresponding to the current HEAD - let repo = 
git2::Repository::open(&git_project.root()).unwrap(); - let head = repo.head().unwrap().target().unwrap(); - repo.tag( - "v0.1.0", - &repo.find_object(head, None).unwrap(), - &repo.signature().unwrap(), - "make a new tag", - false, - ) - .unwrap(); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - git = '{}' - tag = "v0.1.0" - "#, - git_project.url() - ), - ) - .file( - "src/main.rs", - &main_file(r#""{}", dep1::hello()"#, &["dep1"]), - ) - .build(); - - let git_root = git_project.root(); - - project - .cargo("build") - .with_stderr(&format!( - "[UPDATING] git repository `{}`\n\ - [COMPILING] dep1 v0.5.0 ({}?tag=v0.1.0#[..])\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - path2url(&git_root), - path2url(&git_root), - )) - .run(); - - assert!(project.bin("foo").is_file()); - - project - .process(&project.bin("foo")) - .with_stdout("hello world\n") - .run(); - - project.cargo("build").run(); -} - -#[cargo_test] -fn cargo_compile_git_dep_pull_request() { - let project = project(); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file( - "src/dep1.rs", - r#" - pub fn hello() -> &'static str { - "hello world" - } - "#, - ) - }); - - // Make a reference in GitHub's pull request ref naming convention. - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let oid = repo.refname_to_id("HEAD").unwrap(); - let force = false; - let log_message = "open pull request"; - repo.reference("refs/pull/330/head", oid, force, log_message) - .unwrap(); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.0" - - [dependencies] - dep1 = {{ git = "{}", rev = "refs/pull/330/head" }} - "#, - git_project.url() - ), - ) - .file( - "src/main.rs", - &main_file(r#""{}", dep1::hello()"#, &["dep1"]), - ) - .build(); - - let git_root = git_project.root(); - - project - .cargo("build") - .with_stderr(&format!( - "[UPDATING] git repository `{}`\n\ - [COMPILING] dep1 v0.5.0 ({}?rev=refs/pull/330/head#[..])\n\ - [COMPILING] foo v0.0.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - path2url(&git_root), - path2url(&git_root), - )) - .run(); - - assert!(project.bin("foo").is_file()); -} - -#[cargo_test] -fn cargo_compile_with_nested_paths() { - let git_project = git::new("dep1", |project| { - project - .file( - "Cargo.toml", - r#" - [project] - - name = "dep1" - version = "0.5.0" - authors = ["carlhuda@example.com"] - - [dependencies.dep2] - - version = "0.5.0" - path = "vendor/dep2" - - [lib] - - name = "dep1" - "#, - ) - .file( - "src/dep1.rs", - r#" - extern crate dep2; - - pub fn hello() -> &'static str { - dep2::hello() - } - "#, - ) - .file("vendor/dep2/Cargo.toml", &basic_lib_manifest("dep2")) - .file( - "vendor/dep2/src/dep2.rs", - r#" - pub fn hello() -> &'static str { - "hello world" - } - "#, - ) - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - version = "0.5.0" - git = '{}' - - [[bin]] - - name = "foo" - "#, - git_project.url() - ), - ) - .file( - "src/foo.rs", - &main_file(r#""{}", dep1::hello()"#, &["dep1"]), - ) - .build(); - - p.cargo("build").run(); - - assert!(p.bin("foo").is_file()); - - 
p.process(&p.bin("foo")).with_stdout("hello world\n").run(); -} - -#[cargo_test] -fn cargo_compile_with_malformed_nested_paths() { - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file( - "src/dep1.rs", - r#" - pub fn hello() -> &'static str { - "hello world" - } - "#, - ) - .file("vendor/dep2/Cargo.toml", "!INVALID!") - .file( - "vendor/dep3/Cargo.toml", - r#" - [project] - name = "dep3" - version = "0.5.0" - [dependencies] - subdep1 = { path = "../require-extra-build-step" } - "#, - ) - .file("vendor/dep3/src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - version = "0.5.0" - git = '{}' - - [[bin]] - - name = "foo" - "#, - git_project.url() - ), - ) - .file( - "src/foo.rs", - &main_file(r#""{}", dep1::hello()"#, &["dep1"]), - ) - .build(); - - p.cargo("build").run(); - - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("hello world\n").run(); -} - -#[cargo_test] -fn cargo_compile_with_meta_package() { - let git_project = git::new("meta-dep", |project| { - project - .file("dep1/Cargo.toml", &basic_lib_manifest("dep1")) - .file( - "dep1/src/dep1.rs", - r#" - pub fn hello() -> &'static str { - "this is dep1" - } - "#, - ) - .file("dep2/Cargo.toml", &basic_lib_manifest("dep2")) - .file( - "dep2/src/dep2.rs", - r#" - pub fn hello() -> &'static str { - "this is dep2" - } - "#, - ) - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - version = "0.5.0" - git = '{}' - - [dependencies.dep2] - - version = "0.5.0" - git = '{}' - - [[bin]] - - name = "foo" - "#, - git_project.url(), - git_project.url() - ), - ) - .file( - "src/foo.rs", - &main_file( - r#""{} {}", dep1::hello(), dep2::hello()"#, - &["dep1", "dep2"], - ), - ) - .build(); - - p.cargo("build").run(); - - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")) - .with_stdout("this is dep1 this is dep2\n") - .run(); -} - -#[cargo_test] -fn cargo_compile_with_short_ssh_git() { - let url = "git@github.com:a/dep"; - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep] - - git = "{}" - - [[bin]] - - name = "foo" - "#, - url - ), - ) - .file( - "src/foo.rs", - &main_file(r#""{}", dep1::hello()"#, &["dep1"]), - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stdout("") - .with_stderr(&format!( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - invalid url `{}`: relative URL without a base -", - url - )) - .run(); -} - -#[cargo_test] -fn two_revs_same_deps() { - let bar = git::new("meta-dep", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.0.0")) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - }); - - let repo = git2::Repository::open(&bar.root()).unwrap(); - let rev1 = repo.revparse_single("HEAD").unwrap().id(); - - // Commit the changes and make sure we trigger a recompile - bar.change_file("src/lib.rs", "pub fn bar() -> i32 { 2 }"); - git::add(&repo); - let rev2 = git::commit(&repo); - - let foo = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.bar] - git = '{}' - rev = "{}" - - [dependencies.baz] - path = "../baz" - "#, - 
bar.url(), - rev1 - ), - ) - .file( - "src/main.rs", - r#" - extern crate bar; - extern crate baz; - - fn main() { - assert_eq!(bar::bar(), 1); - assert_eq!(baz::baz(), 2); - } - "#, - ) - .build(); - - let _baz = project() - .at("baz") - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "baz" - version = "0.0.0" - authors = [] - - [dependencies.bar] - git = '{}' - rev = "{}" - "#, - bar.url(), - rev2 - ), - ) - .file( - "src/lib.rs", - r#" - extern crate bar; - pub fn baz() -> i32 { bar::bar() } - "#, - ) - .build(); - - foo.cargo("build -v").run(); - assert!(foo.bin("foo").is_file()); - foo.process(&foo.bin("foo")).run(); -} - -#[cargo_test] -fn recompilation() { - let git_project = git::new("bar", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("bar")) - .file("src/bar.rs", "pub fn bar() {}") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - git = '{}' - "#, - git_project.url() - ), - ) - .file("src/main.rs", &main_file(r#""{:?}", bar::bar()"#, &["bar"])) - .build(); - - // First time around we should compile both foo and bar - p.cargo("build") - .with_stderr(&format!( - "[UPDATING] git repository `{}`\n\ - [COMPILING] bar v0.5.0 ({}#[..])\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - git_project.url(), - git_project.url(), - )) - .run(); - - // Don't recompile the second time - p.cargo("build").with_stdout("").run(); - - // Modify a file manually, shouldn't trigger a recompile - git_project.change_file("src/bar.rs", r#"pub fn bar() { println!("hello!"); }"#); - - p.cargo("build").with_stdout("").run(); - - p.cargo("update") - .with_stderr(&format!( - "[UPDATING] git repository `{}`", - git_project.url() - )) - .run(); - - p.cargo("build").with_stdout("").run(); - - // Commit the changes and make sure we don't trigger a recompile because the - // lock file says not to change - let repo = git2::Repository::open(&git_project.root()).unwrap(); - git::add(&repo); - git::commit(&repo); - - println!("compile after commit"); - p.cargo("build").with_stdout("").run(); - p.root().move_into_the_past(); - - // Update the dependency and carry on! 
- p.cargo("update") - .with_stderr(&format!( - "[UPDATING] git repository `{}`\n\ - [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\ - ", - git_project.url() - )) - .run(); - println!("going for the last compile"); - p.cargo("build") - .with_stderr(&format!( - "[COMPILING] bar v0.5.0 ({}#[..])\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - git_project.url(), - )) - .run(); - - // Make sure clean only cleans one dep - p.cargo("clean -p foo").with_stdout("").run(); - p.cargo("build") - .with_stderr( - "[COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]", - ) - .run(); -} - -#[cargo_test] -fn update_with_shared_deps() { - let git_project = git::new("bar", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("bar")) - .file("src/bar.rs", "pub fn bar() {}") - }); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - path = "dep1" - [dependencies.dep2] - path = "dep2" - "#, - ) - .file( - "src/main.rs", - r#" - #[allow(unused_extern_crates)] - extern crate dep1; - #[allow(unused_extern_crates)] - extern crate dep2; - fn main() {} - "#, - ) - .file( - "dep1/Cargo.toml", - &format!( - r#" - [package] - name = "dep1" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - version = "0.5.0" - git = '{}' - "#, - git_project.url() - ), - ) - .file("dep1/src/lib.rs", "") - .file( - "dep2/Cargo.toml", - &format!( - r#" - [package] - name = "dep2" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - version = "0.5.0" - git = '{}' - "#, - git_project.url() - ), - ) - .file("dep2/src/lib.rs", "") - .build(); - - // First time around we should compile both foo and bar - p.cargo("build") - .with_stderr(&format!( - "\ -[UPDATING] git repository `{git}` -[COMPILING] bar v0.5.0 ({git}#[..]) -[COMPILING] [..] v0.5.0 ([..]) -[COMPILING] [..] v0.5.0 ([..]) -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - git = git_project.url(), - )) - .run(); - - // Modify a file manually, and commit it - git_project.change_file("src/bar.rs", r#"pub fn bar() { println!("hello!"); }"#); - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let old_head = repo.head().unwrap().target().unwrap(); - git::add(&repo); - git::commit(&repo); - - sleep_ms(1000); - - // By default, not transitive updates - println!("dep1 update"); - p.cargo("update -p dep1").with_stdout("").run(); - - // Don't do anything bad on a weird --precise argument - println!("bar bad precise update"); - p.cargo("update -p bar --precise 0.1.2") - .with_status(101) - .with_stderr( - "\ -[ERROR] Unable to update [..] - -Caused by: - precise value for git is not a git revision: 0.1.2 - -Caused by: - unable to parse OID - contains invalid characters; class=Invalid (3) -", - ) - .run(); - - // Specifying a precise rev to the old rev shouldn't actually update - // anything because we already have the rev in the db. - println!("bar precise update"); - p.cargo("update -p bar --precise") - .arg(&old_head.to_string()) - .with_stdout("") - .run(); - - // Updating aggressively should, however, update the repo. 
- println!("dep1 aggressive update"); - p.cargo("update -p dep1 --aggressive") - .with_stderr(&format!( - "[UPDATING] git repository `{}`\n\ - [UPDATING] bar v0.5.0 ([..]) -> #[..]\n\ - ", - git_project.url() - )) - .run(); - - // Make sure we still only compile one version of the git repo - println!("build"); - p.cargo("build") - .with_stderr(&format!( - "\ -[COMPILING] bar v0.5.0 ({git}#[..]) -[COMPILING] [..] v0.5.0 ([CWD][..]dep[..]) -[COMPILING] [..] v0.5.0 ([CWD][..]dep[..]) -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - git = git_project.url(), - )) - .run(); - - // We should be able to update transitive deps - p.cargo("update -p bar") - .with_stderr(&format!( - "[UPDATING] git repository `{}`", - git_project.url() - )) - .run(); -} - -#[cargo_test] -fn dep_with_submodule() { - let project = project(); - let git_project = git::new("dep1", |project| { - project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) - }); - let git_project2 = git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")); - - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let url = path2url(git_project2.root()).to_string(); - git::add_submodule(&repo, &url, Path::new("src")); - git::commit(&repo); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - git = '{}' - "#, - git_project.url() - ), - ) - .file( - "src/lib.rs", - "extern crate dep1; pub fn foo() { dep1::dep() }", - ) - .build(); - - project - .cargo("build") - .with_stderr( - "\ -[UPDATING] git repository [..] -[UPDATING] git submodule `file://[..]/dep2` -[COMPILING] dep1 [..] -[COMPILING] foo [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - ) - .run(); -} - -#[cargo_test] -fn dep_with_bad_submodule() { - let project = project(); - let git_project = git::new("dep1", |project| { - project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) - }); - let git_project2 = git::new("dep2", |project| project.file("lib.rs", "pub fn dep() {}")); - - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let url = path2url(git_project2.root()).to_string(); - git::add_submodule(&repo, &url, Path::new("src")); - git::commit(&repo); - - // now amend the first commit on git_project2 to make submodule ref point to not-found - // commit - let repo = git2::Repository::open(&git_project2.root()).unwrap(); - let original_submodule_ref = repo.refname_to_id("refs/heads/master").unwrap(); - let commit = repo.find_commit(original_submodule_ref).unwrap(); - commit - .amend( - Some("refs/heads/master"), - None, - None, - None, - Some("something something"), - None, - ) - .unwrap(); - - let p = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - git = '{}' - "#, - git_project.url() - ), - ) - .file( - "src/lib.rs", - "extern crate dep1; pub fn foo() { dep1::dep() }", - ) - .build(); - - let expected = format!( - "\ -[UPDATING] git repository [..] -[UPDATING] git submodule `file://[..]/dep2` -[ERROR] failed to get `dep1` as a dependency of package `foo v0.5.0 [..]` - -Caused by: - failed to load source for dependency `dep1` - -Caused by: - Unable to update {} - -Caused by: - failed to update submodule `src` - -Caused by: - object not found - no match for id [..] 
-", - path2url(git_project.root()) - ); - - p.cargo("build") - .with_stderr(expected) - .with_status(101) - .run(); -} - -#[cargo_test] -fn two_deps_only_update_one() { - let project = project(); - let git1 = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) - .file("src/lib.rs", "") - }); - let git2 = git::new("dep2", |project| { - project - .file("Cargo.toml", &basic_manifest("dep2", "0.5.0")) - .file("src/lib.rs", "") - }); - - let p = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - git = '{}' - [dependencies.dep2] - git = '{}' - "#, - git1.url(), - git2.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - fn oid_to_short_sha(oid: git2::Oid) -> String { - oid.to_string()[..8].to_string() - } - fn git_repo_head_sha(p: &Project) -> String { - let repo = git2::Repository::open(p.root()).unwrap(); - let head = repo.head().unwrap().target().unwrap(); - oid_to_short_sha(head) - } - - println!("dep1 head sha: {}", git_repo_head_sha(&git1)); - println!("dep2 head sha: {}", git_repo_head_sha(&git2)); - - p.cargo("build") - .with_stderr( - "[UPDATING] git repository `[..]`\n\ - [UPDATING] git repository `[..]`\n\ - [COMPILING] [..] v0.5.0 ([..])\n\ - [COMPILING] [..] v0.5.0 ([..])\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - ) - .run(); - - git1.change_file("src/lib.rs", "pub fn foo() {}"); - let repo = git2::Repository::open(&git1.root()).unwrap(); - git::add(&repo); - let oid = git::commit(&repo); - println!("dep1 head sha: {}", oid_to_short_sha(oid)); - - p.cargo("update -p dep1") - .with_stderr(&format!( - "[UPDATING] git repository `{}`\n\ - [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\ - ", - git1.url() - )) - .run(); -} - -#[cargo_test] -fn stale_cached_version() { - let bar = git::new("meta-dep", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.0.0")) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - }); - - // Update the git database in the cache with the current state of the git - // repo - let foo = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies.bar] - git = '{}' - "#, - bar.url() - ), - ) - .file( - "src/main.rs", - r#" - extern crate bar; - - fn main() { assert_eq!(bar::bar(), 1) } - "#, - ) - .build(); - - foo.cargo("build").run(); - foo.process(&foo.bin("foo")).run(); - - // Update the repo, and simulate someone else updating the lock file and then - // us pulling it down. - bar.change_file("src/lib.rs", "pub fn bar() -> i32 { 1 + 0 }"); - let repo = git2::Repository::open(&bar.root()).unwrap(); - git::add(&repo); - git::commit(&repo); - - sleep_ms(1000); - - let rev = repo.revparse_single("HEAD").unwrap().id(); - - foo.change_file( - "Cargo.lock", - &format!( - r#" - [[package]] - name = "foo" - version = "0.0.0" - dependencies = [ - 'bar 0.0.0 (git+{url}#{hash})' - ] - - [[package]] - name = "bar" - version = "0.0.0" - source = 'git+{url}#{hash}' - "#, - url = bar.url(), - hash = rev - ), - ); - - // Now build! - foo.cargo("build") - .with_stderr(&format!( - "\ -[UPDATING] git repository `{bar}` -[COMPILING] bar v0.0.0 ({bar}#[..]) -[COMPILING] foo v0.0.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - bar = bar.url(), - )) - .run(); - foo.process(&foo.bin("foo")).run(); -} - -#[cargo_test] -fn dep_with_changed_submodule() { - let project = project(); - let git_project = git::new("dep1", |project| { - project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) - }); - - let git_project2 = git::new("dep2", |project| { - project.file("lib.rs", "pub fn dep() -> &'static str { \"project2\" }") - }); - - let git_project3 = git::new("dep3", |project| { - project.file("lib.rs", "pub fn dep() -> &'static str { \"project3\" }") - }); - - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let mut sub = git::add_submodule(&repo, &git_project2.url().to_string(), Path::new("src")); - git::commit(&repo); - - let p = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - [dependencies.dep1] - git = '{}' - "#, - git_project.url() - ), - ) - .file( - "src/main.rs", - " - extern crate dep1; - pub fn main() { println!(\"{}\", dep1::dep()) } - ", - ) - .build(); - - println!("first run"); - p.cargo("run") - .with_stderr( - "[UPDATING] git repository `[..]`\n\ - [UPDATING] git submodule `file://[..]/dep2`\n\ - [COMPILING] dep1 v0.5.0 ([..])\n\ - [COMPILING] foo v0.5.0 ([..])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in \ - [..]\n\ - [RUNNING] `target/debug/foo[EXE]`\n", - ) - .with_stdout("project2\n") - .run(); - - git_project.change_file( - ".gitmodules", - &format!( - "[submodule \"src\"]\n\tpath = src\n\turl={}", - git_project3.url() - ), - ); - - // Sync the submodule and reset it to the new remote. - sub.sync().unwrap(); - { - let subrepo = sub.open().unwrap(); - subrepo - .remote_add_fetch("origin", "refs/heads/*:refs/heads/*") - .unwrap(); - subrepo - .remote_set_url("origin", &git_project3.url().to_string()) - .unwrap(); - let mut origin = subrepo.find_remote("origin").unwrap(); - origin.fetch(&Vec::<String>::new(), None, None).unwrap(); - let id = subrepo.refname_to_id("refs/remotes/origin/master").unwrap(); - let obj = subrepo.find_object(id, None).unwrap(); - subrepo.reset(&obj, git2::ResetType::Hard, None).unwrap(); - } - sub.add_to_index(true).unwrap(); - git::add(&repo); - git::commit(&repo); - - sleep_ms(1000); - // Update the dependency and carry on!
- println!("update"); - p.cargo("update -v") - .with_stderr("") - .with_stderr(&format!( - "[UPDATING] git repository `{}`\n\ - [UPDATING] git submodule `file://[..]/dep3`\n\ - [UPDATING] dep1 v0.5.0 ([..]) -> #[..]\n\ - ", - git_project.url() - )) - .run(); - - println!("last run"); - p.cargo("run") - .with_stderr( - "[COMPILING] dep1 v0.5.0 ([..])\n\ - [COMPILING] foo v0.5.0 ([..])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in \ - [..]\n\ - [RUNNING] `target/debug/foo[EXE]`\n", - ) - .with_stdout("project3\n") - .run(); -} - -#[cargo_test] -fn dev_deps_with_testing() { - let p2 = git::new("bar", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file( - "src/lib.rs", - r#" - pub fn gimme() -> &'static str { "zoidberg" } - "#, - ) - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dev-dependencies.bar] - version = "0.5.0" - git = '{}' - "#, - p2.url() - ), - ) - .file( - "src/main.rs", - r#" - fn main() {} - - #[cfg(test)] - mod tests { - extern crate bar; - #[test] fn foo() { bar::gimme(); } - } - "#, - ) - .build(); - - // Generate a lock file which did not use `bar` to compile, but had to update - // `bar` to generate the lock file - p.cargo("build") - .with_stderr(&format!( - "\ -[UPDATING] git repository `{bar}` -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - bar = p2.url() - )) - .run(); - - // Make sure we use the previous resolution of `bar` instead of updating it - // a second time. - p.cargo("test") - .with_stderr( - "\ -[COMPILING] [..] v0.5.0 ([..]) -[COMPILING] [..] v0.5.0 ([..] -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test tests::foo ... ok") - .run(); -} - -#[cargo_test] -fn git_build_cmd_freshness() { - let foo = git::new("foo", |project| { - project - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - build = "build.rs" - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - .file(".gitignore", "src/bar.rs") - }); - foo.root().move_into_the_past(); - - sleep_ms(1000); - - foo.cargo("build") - .with_stderr( - "\ -[COMPILING] foo v0.0.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - - // Smoke test to make sure it doesn't compile again - println!("first pass"); - foo.cargo("build").with_stdout("").run(); - - // Modify an ignored file and make sure we don't rebuild - println!("second pass"); - foo.change_file("src/bar.rs", ""); - foo.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn git_name_not_always_needed() { - let p2 = git::new("bar", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file( - "src/lib.rs", - r#" - pub fn gimme() -> &'static str { "zoidberg" } - "#, - ) - }); - - let repo = git2::Repository::open(&p2.root()).unwrap(); - let mut cfg = repo.config().unwrap(); - let _ = cfg.remove("user.name"); - let _ = cfg.remove("user.email"); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dev-dependencies.bar] - git = '{}' - "#, - p2.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - // Generate a lock file which did not use `bar` to compile, but had to update - // `bar` to generate the lock file - p.cargo("build") - .with_stderr(&format!( - "\ -[UPDATING] git repository `{bar}` -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - bar = p2.url() - )) - .run(); -} - -#[cargo_test] -fn git_repo_changing_no_rebuild() { - let bar = git::new("bar", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - }); - - // Lock p1 to the first rev in the git repo - let p1 = project() - .at("p1") - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "p1" - version = "0.5.0" - authors = [] - build = 'build.rs' - [dependencies.bar] - git = '{}' - "#, - bar.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .file("build.rs", "fn main() {}") - .build(); - p1.root().move_into_the_past(); - p1.cargo("build") - .with_stderr(&format!( - "\ -[UPDATING] git repository `{bar}` -[COMPILING] [..] -[COMPILING] [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - bar = bar.url() - )) - .run(); - - // Make a commit to lock p2 to a different rev - bar.change_file("src/lib.rs", "pub fn bar() -> i32 { 2 }"); - let repo = git2::Repository::open(&bar.root()).unwrap(); - git::add(&repo); - git::commit(&repo); - - // Lock p2 to the second rev - let p2 = project() - .at("p2") - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "p2" - version = "0.5.0" - authors = [] - [dependencies.bar] - git = '{}' - "#, - bar.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - p2.cargo("build") - .with_stderr(&format!( - "\ -[UPDATING] git repository `{bar}` -[COMPILING] [..] -[COMPILING] [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - bar = bar.url() - )) - .run(); - - // And now for the real test! Make sure that p1 doesn't get rebuilt - // even though the git repo has changed. 
- p1.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn git_dep_build_cmd() { - let p = git::new("foo", |project| { - project - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - path = "bar" - - [[bin]] - - name = "foo" - "#, - ) - .file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file( - "bar/Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - - [lib] - name = "bar" - path = "src/bar.rs" - "#, - ) - .file( - "bar/src/bar.rs.in", - r#" - pub fn gimme() -> i32 { 0 } - "#, - ) - .file( - "bar/build.rs", - r#" - use std::fs; - fn main() { - fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); - } - "#, - ) - }); - - p.root().join("bar").move_into_the_past(); - - p.cargo("build").run(); - - p.process(&p.bin("foo")).with_stdout("0\n").run(); - - // Touching bar.rs.in should cause the `build` command to run again. - p.change_file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 1 }"); - - p.cargo("build").run(); - - p.process(&p.bin("foo")).with_stdout("1\n").run(); -} - -#[cargo_test] -fn fetch_downloads() { - let bar = git::new("bar", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("src/lib.rs", "pub fn bar() -> i32 { 1 }") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.bar] - git = '{}' - "#, - bar.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - p.cargo("fetch") - .with_stderr(&format!( - "[UPDATING] git repository `{url}`", - url = bar.url() - )) - .run(); - - p.cargo("fetch").with_stdout("").run(); -} - -#[cargo_test] -fn warnings_in_git_dep() { - let bar = git::new("bar", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("src/lib.rs", "fn unused() {}") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.bar] - git = '{}' - "#, - bar.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_stderr(&format!( - "[UPDATING] git repository `{}`\n\ - [COMPILING] bar v0.5.0 ({}#[..])\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n", - bar.url(), - bar.url(), - )) - .run(); -} - -#[cargo_test] -fn update_ambiguous() { - let bar1 = git::new("bar1", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("src/lib.rs", "") - }); - let bar2 = git::new("bar2", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.6.0")) - .file("src/lib.rs", "") - }); - let baz = git::new("baz", |project| { - project - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "baz" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - git = '{}' - "#, - bar2.url() - ), - ) - .file("src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.bar] - git = '{}' - [dependencies.baz] - git = '{}' - "#, - bar1.url(), - baz.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("generate-lockfile").run(); - p.cargo("update -p bar") - .with_status(101) - .with_stderr( - "\ -[ERROR] There are multiple `bar` packages in your project, and the 
specification `bar` \ -is ambiguous. -Please re-run this command with `-p <spec>` where `<spec>` is one of the \ -following: - bar:0.[..].0 - bar:0.[..].0 -", - ) - .run(); -} - -#[cargo_test] -fn update_one_dep_in_repo_with_many_deps() { - let bar = git::new("bar", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) - .file("a/src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.bar] - git = '{}' - [dependencies.a] - git = '{}' - "#, - bar.url(), - bar.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("generate-lockfile").run(); - p.cargo("update -p bar") - .with_stderr(&format!("[UPDATING] git repository `{}`", bar.url())) - .run(); -} - -#[cargo_test] -fn switch_deps_does_not_update_transitive() { - let transitive = git::new("transitive", |project| { - project - .file("Cargo.toml", &basic_manifest("transitive", "0.5.0")) - .file("src/lib.rs", "") - }); - let dep1 = git::new("dep1", |project| { - project - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "dep" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.transitive] - git = '{}' - "#, - transitive.url() - ), - ) - .file("src/lib.rs", "") - }); - let dep2 = git::new("dep2", |project| { - project - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "dep" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.transitive] - git = '{}' - "#, - transitive.url() - ), - ) - .file("src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.dep] - git = '{}' - "#, - dep1.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_stderr(&format!( - "\ -[UPDATING] git repository `{}` -[UPDATING] git repository `{}` -[COMPILING] transitive [..] -[COMPILING] dep [..] -[COMPILING] foo [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - dep1.url(), - transitive.url() - )) - .run(); - - // Update the dependency to point to the second repository, but this - // shouldn't update the transitive dependency which is the same. - p.change_file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.dep] - git = '{}' - "#, - dep2.url() - ), - ); - - p.cargo("build") - .with_stderr(&format!( - "\ -[UPDATING] git repository `{}` -[COMPILING] dep [..] -[COMPILING] foo [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
-", - dep2.url() - )) - .run(); -} - -#[cargo_test] -fn update_one_source_updates_all_packages_in_that_git_source() { - let dep = git::new("dep", |project| { - project - .file( - "Cargo.toml", - r#" - [package] - name = "dep" - version = "0.5.0" - authors = [] - - [dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) - .file("a/src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.dep] - git = '{}' - "#, - dep.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); - - let repo = git2::Repository::open(&dep.root()).unwrap(); - let rev1 = repo.revparse_single("HEAD").unwrap().id(); - - // Just be sure to change a file - dep.change_file("src/lib.rs", "pub fn bar() -> i32 { 2 }"); - git::add(&repo); - git::commit(&repo); - - p.cargo("update -p dep").run(); - let lockfile = p.read_lockfile(); - assert!( - !lockfile.contains(&rev1.to_string()), - "{} in {}", - rev1, - lockfile - ); -} - -#[cargo_test] -fn switch_sources() { - let a1 = git::new("a1", |project| { - project - .file("Cargo.toml", &basic_manifest("a", "0.5.0")) - .file("src/lib.rs", "") - }); - let a2 = git::new("a2", |project| { - project - .file("Cargo.toml", &basic_manifest("a", "0.5.1")) - .file("src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies.b] - path = "b" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "b/Cargo.toml", - &format!( - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies.a] - git = '{}' - "#, - a1.url() - ), - ) - .file("b/src/lib.rs", "pub fn main() {}") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] git repository `file://[..]a1` -[COMPILING] a v0.5.0 ([..]a1#[..] -[COMPILING] b v0.5.0 ([..]) -[COMPILING] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.change_file( - "b/Cargo.toml", - &format!( - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies.a] - git = '{}' - "#, - a2.url() - ), - ); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] git repository `file://[..]a2` -[COMPILING] a v0.5.1 ([..]a2#[..] -[COMPILING] b v0.5.0 ([..]) -[COMPILING] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn dont_require_submodules_are_checked_out() { - let p = project().build(); - let git1 = git::new("dep1", |p| { - p.file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - build = "build.rs" - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .file("a/foo", "") - }); - let git2 = git::new("dep2", |p| p); - - let repo = git2::Repository::open(&git1.root()).unwrap(); - let url = path2url(git2.root()).to_string(); - git::add_submodule(&repo, &url, Path::new("a/submodule")); - git::commit(&repo); - - git2::Repository::init(&p.root()).unwrap(); - let url = path2url(git1.root()).to_string(); - let dst = paths::home().join("foo"); - git2::Repository::clone(&url, &dst).unwrap(); - - git1.cargo("build -v").cwd(&dst).run(); -} - -#[cargo_test] -fn doctest_same_name() { - let a2 = git::new("a2", |p| { - p.file("Cargo.toml", &basic_manifest("a", "0.5.0")) - .file("src/lib.rs", "pub fn a2() {}") - }); - - let a1 = git::new("a1", |p| { - p.file( - "Cargo.toml", - &format!( - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - [dependencies] - a = {{ git = '{}' }} - "#, - a2.url() - ), - ) - .file("src/lib.rs", "extern crate a; pub fn a1() {}") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = {{ git = '{}' }} - "#, - a1.url() - ), - ) - .file( - "src/lib.rs", - r#" - #[macro_use] - extern crate a; - "#, - ) - .build(); - - p.cargo("test -v").run(); -} - -#[cargo_test] -fn lints_are_suppressed() { - let a = git::new("a", |p| { - p.file("Cargo.toml", &basic_manifest("a", "0.5.0")).file( - "src/lib.rs", - " - use std::option; - ", - ) - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = {{ git = '{}' }} - "#, - a.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] git repository `[..]` -[COMPILING] a v0.5.0 ([..]) -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn denied_lints_are_allowed() { - let a = git::new("a", |p| { - p.file("Cargo.toml", &basic_manifest("a", "0.5.0")).file( - "src/lib.rs", - " - #![deny(warnings)] - use std::option; - ", - ) - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = {{ git = '{}' }} - "#, - a.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] git repository `[..]` -[COMPILING] a v0.5.0 ([..]) -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn add_a_git_dep() { - let git = git::new("git", |p| { - p.file("Cargo.toml", &basic_manifest("git", "0.5.0")) - .file("src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = {{ path = 'a' }} - git = {{ git = '{}' }} - "#, - git.url() - ), - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - p.change_file( - "a/Cargo.toml", - &format!( - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies] - git = {{ git = '{}' }} - "#, - git.url() - ), - ); - - p.cargo("build").run(); -} - -#[cargo_test] -fn two_at_rev_instead_of_tag() { - let git = git::new("git", |p| { - p.file("Cargo.toml", &basic_manifest("git1", "0.5.0")) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("git2", "0.5.0")) - .file("a/src/lib.rs", "") - }); - - // Make a tag corresponding to the current HEAD - let repo = git2::Repository::open(&git.root()).unwrap(); - let head = repo.head().unwrap().target().unwrap(); - repo.tag( - "v0.1.0", - &repo.find_object(head, None).unwrap(), - &repo.signature().unwrap(), - "make a new tag", - false, - ) - .unwrap(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - git1 = {{ git = '{0}', rev = 'v0.1.0' }} - git2 = {{ git = '{0}', rev = 'v0.1.0' }} - "#, - git.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("generate-lockfile").run(); - p.cargo("build -v").run(); -} - -#[cargo_test] -fn include_overrides_gitignore() { - // Make sure that `package.include` takes precedence over .gitignore. - let p = git::new("foo", |repo| { - repo.file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.5.0" - include = ["src/lib.rs", "ignored.txt", "Cargo.toml"] - "#, - ) - .file( - ".gitignore", - r#" - /target - Cargo.lock - ignored.txt - "#, - ) - .file("src/lib.rs", "") - .file("ignored.txt", "") - .file("build.rs", "fn main() {}") - }); - - p.cargo("build").run(); - p.change_file("ignored.txt", "Trigger rebuild."); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([..]) -[RUNNING] `[..]build-script-build[..]` -[RUNNING] `rustc --crate-name foo src/lib.rs [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - p.cargo("package --list --allow-dirty") - .with_stdout( - "\ -Cargo.toml -Cargo.toml.orig -ignored.txt -src/lib.rs -", - ) - .run(); -} - -#[cargo_test] -fn invalid_git_dependency_manifest() { - let project = project(); - let git_project = git::new("dep1", |project| { - project - .file( - "Cargo.toml", - r#" - [project] - - name = "dep1" - version = "0.5.0" - authors = ["carlhuda@example.com"] - categories = ["algorithms"] - categories = ["algorithms"] - - [lib] - - name = "dep1" - "#, - ) - .file( - "src/dep1.rs", - r#" - pub fn hello() -> &'static str { - "hello world" - } - "#, - ) - }); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.dep1] - - git = '{}' - "#, - git_project.url() - ), - ) - .file( - "src/main.rs", - &main_file(r#""{}", dep1::hello()"#, &["dep1"]), - ) - .build(); - - let git_root = git_project.root(); - - project - .cargo("build") - .with_status(101) - .with_stderr(&format!( - "\ -[UPDATING] git repository `{}` -[ERROR] failed to get `dep1` as a dependency of package `foo v0.5.0 ([..])` - -Caused by: - failed to load source for dependency `dep1` - -Caused by: - Unable to update {} - -Caused by: - failed to parse manifest at `[..]` - -Caused by: - could not parse input as TOML - -Caused by: - duplicate key: `categories` for key `project` at line 10 column 21", - path2url(&git_root), - path2url(&git_root), - )) - .run(); -} - -#[cargo_test] -fn failed_submodule_checkout() { - let project = project(); - let git_project = git::new("dep1", |project| { - project.file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) - }); - - let git_project2 = git::new("dep2", |project| project.file("lib.rs", "")); - - let listener = TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = listener.local_addr().unwrap(); - let done = Arc::new(AtomicBool::new(false)); - let done2 = done.clone(); - - let t = thread::spawn(move || { - while !done2.load(Ordering::SeqCst) { - if let Ok((mut socket, _)) = listener.accept() { - drop(socket.write_all(b"foo\r\n")); - } - } - }); - - let repo = git2::Repository::open(&git_project2.root()).unwrap(); - let url = format!("https://{}:{}/", addr.ip(), addr.port()); - { - let mut s = repo.submodule(&url, Path::new("bar"), false).unwrap(); - let subrepo = s.open().unwrap(); - let mut cfg = subrepo.config().unwrap(); - cfg.set_str("user.email", "foo@bar.com").unwrap(); - cfg.set_str("user.name", "Foo Bar").unwrap(); - git::commit(&subrepo); - s.add_finalize().unwrap(); - } - git::commit(&repo); - drop((repo, url)); - - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let url = path2url(git_project2.root()).to_string(); - git::add_submodule(&repo, &url, Path::new("src")); - git::commit(&repo); - drop(repo); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - dep1 = {{ git = '{}' }} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - project - .cargo("build") - .with_status(101) - .with_stderr_contains(" failed to update submodule `src`") - .with_stderr_contains(" failed to update submodule `bar`") - .run(); - project - .cargo("build") - .with_status(101) - .with_stderr_contains(" failed to update submodule `src`") - .with_stderr_contains(" failed to update submodule `bar`") - .run(); - - done.store(true, Ordering::SeqCst); - drop(TcpStream::connect(&addr)); - t.join().unwrap(); -} 
- -#[cargo_test] -fn use_the_cli() { - if disable_git_cli() { - return; - } - let project = project(); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) - .file("src/lib.rs", "") - }); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - dep1 = {{ git = '{}' }} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - " - [net] - git-fetch-with-cli = true - ", - ) - .build(); - - let stderr = "\ -[UPDATING] git repository `[..]` -[RUNNING] `git fetch [..]` -[COMPILING] dep1 [..] -[RUNNING] `rustc [..]` -[COMPILING] foo [..] -[RUNNING] `rustc [..]` -[FINISHED] [..] -"; - - project.cargo("build -v").with_stderr(stderr).run(); -} - -#[cargo_test] -fn templatedir_doesnt_cause_problems() { - let git_project2 = git::new("dep2", |project| { - project - .file("Cargo.toml", &basic_manifest("dep2", "0.5.0")) - .file("src/lib.rs", "") - }); - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) - .file("src/lib.rs", "") - }); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "fo" - version = "0.5.0" - authors = [] - - [dependencies] - dep1 = {{ git = '{}' }} - "#, - git_project.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - fs::write( - paths::home().join(".gitconfig"), - format!( - r#" - [init] - templatedir = {} - "#, - git_project2 - .url() - .to_file_path() - .unwrap() - .to_str() - .unwrap() - .replace("\\", "/") - ), - ) - .unwrap(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn git_with_cli_force() { - if disable_git_cli() { - return; - } - // Supports a force-pushed repo. - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file("src/lib.rs", r#"pub fn f() { println!("one"); }"#) - }); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.1" - edition = "2018" - - [dependencies] - dep1 = {{ git = "{}" }} - "#, - git_project.url() - ), - ) - .file("src/main.rs", "fn main() { dep1::f(); }") - .file( - ".cargo/config", - " - [net] - git-fetch-with-cli = true - ", - ) - .build(); - p.cargo("build").run(); - p.rename_run("foo", "foo1").with_stdout("one").run(); - - // commit --amend a change that will require a force fetch. - let repo = git2::Repository::open(&git_project.root()).unwrap(); - git_project.change_file("src/lib.rs", r#"pub fn f() { println!("two"); }"#); - git::add(&repo); - let id = repo.refname_to_id("HEAD").unwrap(); - let commit = repo.find_commit(id).unwrap(); - let tree_id = t!(t!(repo.index()).write_tree()); - t!(commit.amend( - Some("HEAD"), - None, - None, - None, - None, - Some(&t!(repo.find_tree(tree_id))) - )); - // Perform the fetch. - p.cargo("update").run(); - p.cargo("build").run(); - p.rename_run("foo", "foo2").with_stdout("two").run(); -} - -#[cargo_test] -fn git_fetch_cli_env_clean() { - if disable_git_cli() { - return; - } - // This tests that git-fetch-with-cli works when GIT_DIR environment - // variable is set (for whatever reason). 
- let git_dep = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) - .file("src/lib.rs", "") - }); - - let git_proj = git::new("foo", |project| { - project - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - [dependencies] - dep1 = {{ git = '{}' }} - "#, - git_dep.url() - ), - ) - .file("src/lib.rs", "pub extern crate dep1;") - .file( - ".cargo/config", - " - [net] - git-fetch-with-cli = true - ", - ) - }); - - // The directory set here isn't too important. Pointing to our own git - // directory causes git to be confused and fail. Can also point to an - // empty directory, or a nonexistent one. - git_proj - .cargo("fetch") - .env("GIT_DIR", git_proj.root().join(".git")) - .run(); -} - -#[cargo_test] -fn dirty_submodule() { - // `cargo package` warns for dirty file in submodule. - let (git_project, repo) = git::new_repo("foo", |project| { - project - .file("Cargo.toml", &basic_manifest("foo", "0.5.0")) - // This is necessary because `git::add` is too eager. - .file(".gitignore", "/target") - }); - let git_project2 = git::new("src", |project| { - project.no_manifest().file("lib.rs", "pub fn f() {}") - }); - - let url = path2url(git_project2.root()).to_string(); - git::add_submodule(&repo, &url, Path::new("src")); - - // Submodule added, but not committed. - git_project - .cargo("package --no-verify") - .with_status(101) - .with_stderr( - "\ -[WARNING] manifest has no [..] -See [..] -[ERROR] 1 files in the working directory contain changes that were not yet committed into git: - -.gitmodules - -to proceed despite [..] -", - ) - .run(); - - git::commit(&repo); - git_project.cargo("package --no-verify").run(); - - // Modify file, check for warning. - git_project.change_file("src/lib.rs", ""); - git_project - .cargo("package --no-verify") - .with_status(101) - .with_stderr( - "\ -[WARNING] manifest has no [..] -See [..] -[ERROR] 1 files in the working directory contain changes that were not yet committed into git: - -src/lib.rs - -to proceed despite [..] -", - ) - .run(); - // Commit the change. - let sub_repo = git2::Repository::open(git_project.root().join("src")).unwrap(); - git::add(&sub_repo); - git::commit(&sub_repo); - git::add(&repo); - git::commit(&repo); - git_project.cargo("package --no-verify").run(); - - // Try with a nested submodule. - let git_project3 = git::new("bar", |project| project.no_manifest().file("mod.rs", "")); - let url = path2url(git_project3.root()).to_string(); - git::add_submodule(&sub_repo, &url, Path::new("bar")); - git_project - .cargo("package --no-verify") - .with_status(101) - .with_stderr( - "\ -[WARNING] manifest has no [..] -See [..] -[ERROR] 1 files in the working directory contain changes that were not yet committed into git: - -src/.gitmodules - -to proceed despite [..] -", - ) - .run(); - - // Commit the submodule addition. - git::commit(&sub_repo); - git::add(&repo); - git::commit(&repo); - git_project.cargo("package --no-verify").run(); - // Modify within nested submodule. - git_project.change_file("src/bar/new_file.rs", "//test"); - git_project - .cargo("package --no-verify") - .with_status(101) - .with_stderr( - "\ -[WARNING] manifest has no [..] -See [..] -[ERROR] 1 files in the working directory contain changes that were not yet committed into git: - -src/bar/new_file.rs - -to proceed despite [..] -", - ) - .run(); - // And commit the change. 
- let sub_sub_repo = git2::Repository::open(git_project.root().join("src/bar")).unwrap(); - git::add(&sub_sub_repo); - git::commit(&sub_sub_repo); - git::add(&sub_repo); - git::commit(&sub_repo); - git::add(&repo); - git::commit(&repo); - git_project.cargo("package --no-verify").run(); -} - -#[cargo_test] -fn default_not_master() { - let project = project(); - - // Create a repository with a `master` branch, but switch the head to a - // branch called `main` at the same time. - let (git_project, repo) = git::new_repo("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file("src/lib.rs", "pub fn foo() {}") - }); - let head_id = repo.head().unwrap().target().unwrap(); - let head = repo.find_commit(head_id).unwrap(); - repo.branch("main", &head, false).unwrap(); - repo.set_head("refs/heads/main").unwrap(); - - // Then create a commit on the new `main` branch so `master` and `main` - // differ. - git_project.change_file("src/lib.rs", "pub fn bar() {}"); - git::add(&repo); - git::commit(&repo); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - [dependencies] - dep1 = {{ git = '{}' }} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "pub fn foo() { dep1::bar() }") - .build(); - - project - .cargo("build") - .with_stderr( - "\ -[UPDATING] git repository `[..]` -[COMPILING] dep1 v0.5.0 ([..]) -[COMPILING] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); -} - -#[cargo_test] -fn historical_lockfile_works() { - let project = project(); - - let (git_project, repo) = git::new_repo("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file("src/lib.rs", "") - }); - let head_id = repo.head().unwrap().target().unwrap(); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - - [dependencies] - dep1 = {{ git = '{}', branch = 'master' }} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - project.cargo("build").run(); - project.change_file( - "Cargo.lock", - &format!( - r#"# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -[[package]] -name = "dep1" -version = "0.5.0" -source = "git+{}#{}" - -[[package]] -name = "foo" -version = "0.5.0" -dependencies = [ - "dep1", -] -"#, - git_project.url(), - head_id - ), - ); - project - .cargo("build") - .with_stderr("[FINISHED] [..]\n") - .run(); -} - -#[cargo_test] -fn historical_lockfile_works_with_vendor() { - let project = project(); - - let (git_project, repo) = git::new_repo("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file("src/lib.rs", "") - }); - let head_id = repo.head().unwrap().target().unwrap(); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - - [dependencies] - dep1 = {{ git = '{}', branch = 'master' }} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - let output = project.cargo("vendor").exec_with_output().unwrap(); - project.change_file(".cargo/config", str::from_utf8(&output.stdout).unwrap()); - project.change_file( - "Cargo.lock", - &format!( - r#"# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-[[package]] -name = "dep1" -version = "0.5.0" -source = "git+{}#{}" - -[[package]] -name = "foo" -version = "0.5.0" -dependencies = [ - "dep1", -] -"#, - git_project.url(), - head_id - ), - ); - project.cargo("build").run(); -} - -#[cargo_test] -fn two_dep_forms() { - let project = project(); - - let (git_project, _repo) = git::new_repo("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file("src/lib.rs", "") - }); - - let project = project - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - [dependencies] - dep1 = {{ git = '{}', branch = 'master' }} - a = {{ path = 'a' }} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - &format!( - r#" - [project] - name = "a" - version = "0.5.0" - [dependencies] - dep1 = {{ git = '{}' }} - "#, - git_project.url() - ), - ) - .file("a/src/lib.rs", "") - .build(); - - // This'll download the git repository twice, one with HEAD and once with - // the master branch. Then it'll compile 4 crates, the 2 git deps, then - // the two local deps. - project - .cargo("build") - .with_stderr( - "\ -[UPDATING] [..] -[UPDATING] [..] -[COMPILING] [..] -[COMPILING] [..] -[COMPILING] [..] -[COMPILING] [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn metadata_master_consistency() { - // SourceId consistency in the `cargo metadata` output when `master` is - // explicit or implicit, using new or old Cargo.lock. - let (git_project, git_repo) = git::new_repo("bar", |project| { - project - .file("Cargo.toml", &basic_manifest("bar", "1.0.0")) - .file("src/lib.rs", "") - }); - let bar_hash = git_repo.head().unwrap().target().unwrap().to_string(); - - // Explicit branch="master" with a lock file created before 1.47 (does not contain ?branch=master). 
- let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = {{ git = "{}", branch = "master" }} - "#, - git_project.url() - ), - ) - .file( - "Cargo.lock", - &format!( - r#" - [[package]] - name = "bar" - version = "1.0.0" - source = "git+{}#{}" - - [[package]] - name = "foo" - version = "0.1.0" - dependencies = [ - "bar", - ] - "#, - git_project.url(), - bar_hash, - ), - ) - .file("src/lib.rs", "") - .build(); - - let metadata = |bar_source| -> String { - r#" - { - "packages": [ - { - "name": "bar", - "version": "1.0.0", - "id": "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)", - "license": null, - "license_file": null, - "description": null, - "source": "__BAR_SOURCE__#__BAR_HASH__", - "dependencies": [], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - }, - { - "name": "foo", - "version": "0.1.0", - "id": "foo 0.1.0 [..]", - "license": null, - "license_file": null, - "description": null, - "source": null, - "dependencies": [ - { - "name": "bar", - "source": "__BAR_SOURCE__", - "req": "*", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": null, - "registry": null - } - ], - "targets": "{...}", - "features": {}, - "manifest_path": "[..]", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - } - ], - "workspace_members": [ - "foo 0.1.0 [..]" - ], - "resolve": { - "nodes": [ - { - "id": "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)", - "dependencies": [], - "deps": [], - "features": [] - }, - { - "id": "foo 0.1.0 [..]", - "dependencies": [ - "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)" - ], - "deps": [ - { - "name": "bar", - "pkg": "bar 1.0.0 (__BAR_SOURCE__#__BAR_HASH__)", - "dep_kinds": [ - { - "kind": null, - "target": null - } - ] - } - ], - "features": [] - } - ], - "root": "foo 0.1.0 [..]" - }, - "target_directory": "[..]", - "version": 1, - "workspace_root": "[..]", - "metadata": null - } - "# - .replace("__BAR_SOURCE__", bar_source) - .replace("__BAR_HASH__", &bar_hash) - }; - - let bar_source = format!("git+{}?branch=master", git_project.url()); - p.cargo("metadata").with_json(&metadata(&bar_source)).run(); - - // Conversely, remove branch="master" from Cargo.toml, but use a new Cargo.lock that has ?branch=master. - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = {{ git = "{}" }} - "#, - git_project.url() - ), - ) - .file( - "Cargo.lock", - &format!( - r#" - [[package]] - name = "bar" - version = "1.0.0" - source = "git+{}?branch=master#{}" - - [[package]] - name = "foo" - version = "0.1.0" - dependencies = [ - "bar", - ] - "#, - git_project.url(), - bar_hash - ), - ) - .file("src/lib.rs", "") - .build(); - - // No ?branch=master! - let bar_source = format!("git+{}", git_project.url()); - p.cargo("metadata").with_json(&metadata(&bar_source)).run(); -} - -#[cargo_test] -fn git_with_force_push() { - // Checks that cargo can handle force-pushes to git repos. 
- // This works by having a git dependency that is updated with an amend - // commit, and tries with various forms (default branch, branch, rev, - // tag). - let main = |text| format!(r#"pub fn f() {{ println!("{}"); }}"#, text); - let (git_project, repo) = git::new_repo("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file("src/lib.rs", &main("one")) - }); - let manifest = |extra| { - format!( - r#" - [project] - name = "foo" - version = "0.0.1" - edition = "2018" - - [dependencies] - dep1 = {{ git = "{}"{} }} - "#, - git_project.url(), - extra - ) - }; - let p = project() - .file("Cargo.toml", &manifest("")) - .file("src/main.rs", "fn main() { dep1::f(); }") - .build(); - // Download the original and make sure it is OK. - p.cargo("build").run(); - p.rename_run("foo", "foo1").with_stdout("one").run(); - - let find_head = || t!(t!(repo.head()).peel_to_commit()); - - let amend_commit = |text| { - // commit --amend a change that will require a force fetch. - git_project.change_file("src/lib.rs", &main(text)); - git::add(&repo); - let commit = find_head(); - let tree_id = t!(t!(repo.index()).write_tree()); - t!(commit.amend( - Some("HEAD"), - None, - None, - None, - None, - Some(&t!(repo.find_tree(tree_id))) - )); - }; - - let mut rename_annoyance = 1; - - let mut verify = |text: &str| { - // Perform the fetch. - p.cargo("update").run(); - p.cargo("build").run(); - rename_annoyance += 1; - p.rename_run("foo", &format!("foo{}", rename_annoyance)) - .with_stdout(text) - .run(); - }; - - amend_commit("two"); - verify("two"); - - // Try with a rev. - let head1 = find_head().id().to_string(); - let extra = format!(", rev = \"{}\"", head1); - p.change_file("Cargo.toml", &manifest(&extra)); - verify("two"); - amend_commit("three"); - let head2 = find_head().id().to_string(); - assert_ne!(&head1, &head2); - let extra = format!(", rev = \"{}\"", head2); - p.change_file("Cargo.toml", &manifest(&extra)); - verify("three"); - - // Try with a tag. - git::tag(&repo, "my-tag"); - p.change_file("Cargo.toml", &manifest(", tag = \"my-tag\"")); - verify("three"); - amend_commit("tag-three"); - let head = t!(t!(repo.head()).peel(git2::ObjectType::Commit)); - t!(repo.tag("my-tag", &head, &t!(repo.signature()), "move tag", true)); - verify("tag-three"); - - // Try with a branch. - let br = t!(repo.branch("awesome-stuff", &find_head(), false)); - t!(repo.checkout_tree(&t!(br.get().peel(git2::ObjectType::Tree)), None)); - t!(repo.set_head("refs/heads/awesome-stuff")); - git_project.change_file("src/lib.rs", &main("awesome-three")); - git::add(&repo); - git::commit(&repo); - p.change_file("Cargo.toml", &manifest(", branch = \"awesome-stuff\"")); - verify("awesome-three"); - amend_commit("awesome-four"); - verify("awesome-four"); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/git_auth.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/git_auth.rs deleted file mode 100644 index 85702290a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/git_auth.rs +++ /dev/null @@ -1,398 +0,0 @@ -//! Tests for git authentication. 
- -use std::collections::HashSet; -use std::io::prelude::*; -use std::io::BufReader; -use std::net::{SocketAddr, TcpListener}; -use std::sync::atomic::{AtomicUsize, Ordering::SeqCst}; -use std::sync::Arc; -use std::thread::{self, JoinHandle}; - -use cargo_test_support::paths; -use cargo_test_support::{basic_manifest, project}; - -fn setup_failed_auth_test() -> (SocketAddr, JoinHandle<()>, Arc) { - let server = TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = server.local_addr().unwrap(); - - fn headers(rdr: &mut dyn BufRead) -> HashSet { - let valid = ["GET", "Authorization", "Accept"]; - rdr.lines() - .map(|s| s.unwrap()) - .take_while(|s| s.len() > 2) - .map(|s| s.trim().to_string()) - .filter(|s| valid.iter().any(|prefix| s.starts_with(*prefix))) - .collect() - } - - let connections = Arc::new(AtomicUsize::new(0)); - let connections2 = connections.clone(); - let t = thread::spawn(move || { - let mut conn = BufReader::new(server.accept().unwrap().0); - let req = headers(&mut conn); - connections2.fetch_add(1, SeqCst); - conn.get_mut() - .write_all( - b"HTTP/1.1 401 Unauthorized\r\n\ - WWW-Authenticate: Basic realm=\"wheee\"\r\n\ - Content-Length: 0\r\n\ - \r\n", - ) - .unwrap(); - assert_eq!( - req, - vec![ - "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", - "Accept: */*", - ] - .into_iter() - .map(|s| s.to_string()) - .collect() - ); - - let req = headers(&mut conn); - connections2.fetch_add(1, SeqCst); - conn.get_mut() - .write_all( - b"HTTP/1.1 401 Unauthorized\r\n\ - WWW-Authenticate: Basic realm=\"wheee\"\r\n\ - \r\n", - ) - .unwrap(); - assert_eq!( - req, - vec![ - "GET /foo/bar/info/refs?service=git-upload-pack HTTP/1.1", - "Authorization: Basic Zm9vOmJhcg==", - "Accept: */*", - ] - .into_iter() - .map(|s| s.to_string()) - .collect() - ); - }); - - let script = project() - .at("script") - .file("Cargo.toml", &basic_manifest("script", "0.1.0")) - .file( - "src/main.rs", - r#" - fn main() { - println!("username=foo"); - println!("password=bar"); - } - "#, - ) - .build(); - - script.cargo("build -v").run(); - let script = script.bin("script"); - - let config = paths::home().join(".gitconfig"); - let mut config = git2::Config::open(&config).unwrap(); - config - .set_str( - "credential.helper", - // This is a bash script so replace `\` with `/` for Windows - &script.display().to_string().replace("\\", "/"), - ) - .unwrap(); - (addr, t, connections) -} - -// Tests that HTTP auth is offered from `credential.helper`. -#[cargo_test] -fn http_auth_offered() { - let (addr, t, connections) = setup_failed_auth_test(); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - git = "http://127.0.0.1:{}/foo/bar" - "#, - addr.port() - ), - ) - .file("src/main.rs", "") - .file( - ".cargo/config", - "[net] - retry = 0 - ", - ) - .build(); - - // This is a "contains" check because the last error differs by platform, - // may span multiple lines, and isn't relevant to this test. - p.cargo("build") - .with_status(101) - .with_stderr_contains(&format!( - "\ -[UPDATING] git repository `http://{addr}/foo/bar` -[ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 [..]` - -Caused by: - failed to load source for dependency `bar` - -Caused by: - Unable to update http://{addr}/foo/bar - -Caused by: - failed to clone into: [..] - -Caused by: - failed to authenticate when downloading repository - - * attempted to find username/password via `credential.helper`, but [..] 
- - if the git CLI succeeds then `net.git-fetch-with-cli` may help here - https://[..] - -Caused by: -", - addr = addr - )) - .run(); - - assert_eq!(connections.load(SeqCst), 2); - t.join().ok().unwrap(); -} - -// Boy, sure would be nice to have a TLS implementation in rust! -#[cargo_test] -fn https_something_happens() { - let server = TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = server.local_addr().unwrap(); - let t = thread::spawn(move || { - let mut conn = server.accept().unwrap().0; - drop(conn.write(b"1234")); - drop(conn.shutdown(std::net::Shutdown::Write)); - drop(conn.read(&mut [0; 16])); - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - git = "https://127.0.0.1:{}/foo/bar" - "#, - addr.port() - ), - ) - .file("src/main.rs", "") - .file( - ".cargo/config", - "[net] - retry = 0 - ", - ) - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr_contains(&format!( - "[UPDATING] git repository `https://{addr}/foo/bar`", - addr = addr - )) - .with_stderr_contains(&format!( - "\ -Caused by: - {errmsg} -", - errmsg = if cfg!(windows) { - "[..]failed to send request: [..]" - } else if cfg!(target_os = "macos") { - // macOS is difficult to tests as some builds may use Security.framework, - // while others may use OpenSSL. In that case, let's just not verify the error - // message here. - "[..]" - } else { - "[..]SSL error: [..]" - } - )) - .run(); - - t.join().ok().unwrap(); -} - -// It would sure be nice to have an SSH implementation in Rust! -#[cargo_test] -fn ssh_something_happens() { - let server = TcpListener::bind("127.0.0.1:0").unwrap(); - let addr = server.local_addr().unwrap(); - let t = thread::spawn(move || { - drop(server.accept().unwrap()); - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - git = "ssh://127.0.0.1:{}/foo/bar" - "#, - addr.port() - ), - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr_contains(&format!( - "[UPDATING] git repository `ssh://{addr}/foo/bar`", - addr = addr - )) - .with_stderr_contains( - "\ -Caused by: - [..]failed to start SSH session: Failed getting banner[..] -", - ) - .run(); - t.join().ok().unwrap(); -} - -#[cargo_test] -fn net_err_suggests_fetch_with_cli() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - authors = [] - - [dependencies] - foo = { git = "ssh://needs-proxy.invalid/git" } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[UPDATING] git repository `ssh://needs-proxy.invalid/git` -warning: spurious network error[..] -warning: spurious network error[..] -[ERROR] failed to get `foo` as a dependency of package `foo v0.0.0 [..]` - -Caused by: - failed to load source for dependency `foo` - -Caused by: - Unable to update ssh://needs-proxy.invalid/git - -Caused by: - failed to clone into: [..] - -Caused by: - network failure seems to have happened - if a proxy or similar is necessary `net.git-fetch-with-cli` may help here - https://[..] - -Caused by: - failed to resolve address for needs-proxy.invalid[..] 
-", - ) - .run(); - - p.change_file( - ".cargo/config", - " - [net] - git-fetch-with-cli = true - ", - ); - - p.cargo("build -v") - .with_status(101) - .with_stderr_contains("[..]Unable to update[..]") - .with_stderr_does_not_contain("[..]try enabling `git-fetch-with-cli`[..]") - .run(); -} - -#[cargo_test] -fn instead_of_url_printed() { - let (addr, t, _connections) = setup_failed_auth_test(); - let config = paths::home().join(".gitconfig"); - let mut config = git2::Config::open(&config).unwrap(); - config - .set_str( - &format!("url.http://{}/.insteadOf", addr), - "https://foo.bar/", - ) - .unwrap(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - git = "https://foo.bar/foo/bar" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr(&format!( - "\ -[UPDATING] git repository `https://foo.bar/foo/bar` -[ERROR] failed to get `bar` as a dependency of package `foo [..]` - -Caused by: - failed to load source for dependency `bar` - -Caused by: - Unable to update https://foo.bar/foo/bar - -Caused by: - failed to clone into: [..] - -Caused by: - failed to authenticate when downloading repository: http://{addr}/foo/bar - - * attempted to find username/password via `credential.helper`, but maybe the found credentials were incorrect - - if the git CLI succeeds then `net.git-fetch-with-cli` may help here - https://[..] - -Caused by: - [..] -", - addr = addr - )) - .run(); - - t.join().ok().unwrap(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/git_gc.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/git_gc.rs deleted file mode 100644 index 38b4cd023..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/git_gc.rs +++ /dev/null @@ -1,115 +0,0 @@ -//! Tests for git garbage collection. 
- -use std::env; -use std::ffi::OsStr; -use std::path::PathBuf; -use std::process::Command; - -use cargo_test_support::git; -use cargo_test_support::paths; -use cargo_test_support::project; -use cargo_test_support::registry::Package; - -use url::Url; - -fn find_index() -> PathBuf { - let dir = paths::home().join(".cargo/registry/index"); - dir.read_dir().unwrap().next().unwrap().unwrap().path() -} - -fn run_test(path_env: Option<&OsStr>) { - const N: usize = 50; - - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .build(); - Package::new("bar", "0.1.0").publish(); - - foo.cargo("build").run(); - - let index = find_index(); - let path = paths::home().join("tmp"); - let url = Url::from_file_path(&path).unwrap().to_string(); - let repo = git2::Repository::init(&path).unwrap(); - let index = git2::Repository::open(&index).unwrap(); - let mut cfg = repo.config().unwrap(); - cfg.set_str("user.email", "foo@bar.com").unwrap(); - cfg.set_str("user.name", "Foo Bar").unwrap(); - let mut cfg = index.config().unwrap(); - cfg.set_str("user.email", "foo@bar.com").unwrap(); - cfg.set_str("user.name", "Foo Bar").unwrap(); - - for _ in 0..N { - git::commit(&repo); - index - .remote_anonymous(&url) - .unwrap() - .fetch(&["refs/heads/master:refs/remotes/foo/master"], None, None) - .unwrap(); - } - drop((repo, index)); - Package::new("bar", "0.1.1").publish(); - - let before = find_index() - .join(".git/objects/pack") - .read_dir() - .unwrap() - .count(); - assert!(before > N); - - let mut cmd = foo.cargo("update"); - cmd.env("__CARGO_PACKFILE_LIMIT", "10"); - if let Some(path) = path_env { - cmd.env("PATH", path); - } - cmd.env("CARGO_LOG", "trace"); - cmd.run(); - let after = find_index() - .join(".git/objects/pack") - .read_dir() - .unwrap() - .count(); - assert!( - after < before, - "packfiles before: {}\n\ - packfiles after: {}", - before, - after - ); -} - -#[cargo_test] -fn use_git_gc() { - if Command::new("git").arg("--version").output().is_err() { - return; - } - run_test(None); -} - -#[cargo_test] -fn avoid_using_git() { - let path = env::var_os("PATH").unwrap_or_default(); - let mut paths = env::split_paths(&path).collect::>(); - let idx = paths - .iter() - .position(|p| p.join("git").exists() || p.join("git.exe").exists()); - match idx { - Some(i) => { - paths.remove(i); - } - None => return, - } - run_test(Some(&env::join_paths(&paths).unwrap())); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/glob_targets.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/glob_targets.rs deleted file mode 100644 index 643572e42..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/glob_targets.rs +++ /dev/null @@ -1,539 +0,0 @@ -//! Tests for target filter flags rith glob patterns. - -use cargo_test_support::{project, Project}; - -#[cargo_test] -fn build_example() { - full_project() - .cargo("build -v --example 'ex*1'") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name example1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_bin() { - full_project() - .cargo("build -v --bin 'bi*1'") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name bin1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn build_bench() { - full_project() - .cargo("build -v --bench 'be*1'") - .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_test() { - full_project() - .cargo("build -v --test 'te*1'") - .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn check_example() { - full_project() - .cargo("check -v --example 'ex*1'") - .with_stderr( - "\ -[CHECKING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name example1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn check_bin() { - full_project() - .cargo("check -v --bin 'bi*1'") - .with_stderr( - "\ -[CHECKING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name bin1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn check_bench() { - full_project() - .cargo("check -v --bench 'be*1'") - .with_stderr( - "\ -[CHECKING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name bench1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn check_test() { - full_project() - .cargo("check -v --test 'te*1'") - .with_stderr( - "\ -[CHECKING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name test1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn doc_bin() { - full_project() - .cargo("doc -v --bin 'bi*1'") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[RUNNING] `rustdoc --crate-type bin --crate-name bin1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn fix_example() { - full_project() - .cargo("fix -v --example 'ex*1' --allow-no-vcs") - .with_stderr( - "\ -[CHECKING] foo v0.0.1 ([CWD]) -[RUNNING] `[..] rustc --crate-name example1 [..]` -[FIXING] examples/example1.rs -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn fix_bin() { - full_project() - .cargo("fix -v --bin 'bi*1' --allow-no-vcs") - .with_stderr( - "\ -[CHECKING] foo v0.0.1 ([CWD]) -[RUNNING] `[..] rustc --crate-name bin1 [..]` -[FIXING] src/bin/bin1.rs -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn fix_bench() { - full_project() - .cargo("fix -v --bench 'be*1' --allow-no-vcs") - .with_stderr( - "\ -[CHECKING] foo v0.0.1 ([CWD]) -[RUNNING] `[..] 
rustc --crate-name bench1 [..]` -[FIXING] benches/bench1.rs -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn fix_test() { - full_project() - .cargo("fix -v --test 'te*1' --allow-no-vcs") - .with_stderr( - "\ -[CHECKING] foo v0.0.1 ([CWD]) -[RUNNING] `[..] rustc --crate-name test1 [..]` -[FIXING] tests/test1.rs -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn run_example_and_bin() { - let p = full_project(); - p.cargo("run -v --bin 'bi*1'") - .with_status(101) - .with_stderr("[ERROR] `cargo run` does not support glob patterns on target selection") - .run(); - - p.cargo("run -v --example 'ex*1'") - .with_status(101) - .with_stderr("[ERROR] `cargo run` does not support glob patterns on target selection") - .run(); -} - -#[cargo_test] -fn test_example() { - full_project() - .cargo("test -v --example 'ex*1'") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name example1 [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..]example1[..] -", - ) - .run(); -} - -#[cargo_test] -fn test_bin() { - full_project() - .cargo("test -v --bin 'bi*1'") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name bin1 [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..]bin1[..] -", - ) - .run(); -} - -#[cargo_test] -fn test_bench() { - full_project() - .cargo("test -v --bench 'be*1'") - .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..]bench1[..] -", - ) - .run(); -} - -#[cargo_test] -fn test_test() { - full_project() - .cargo("test -v --test 'te*1'") - .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..]test1[..] -", - ) - .run(); -} - -#[cargo_test] -fn bench_example() { - full_project() - .cargo("bench -v --example 'ex*1'") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name example1 [..]` -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] `[..]example1[..] --bench` -", - ) - .run(); -} - -#[cargo_test] -fn bench_bin() { - full_project() - .cargo("bench -v --bin 'bi*1'") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name bin1 [..]` -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] `[..]bin1[..] 
--bench` -", - ) - .run(); -} - -#[cargo_test] -fn bench_bench() { - full_project() - .cargo("bench -v --bench 'be*1'") - .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] `[..]bench1[..] --bench` -", - ) - .run(); -} - -#[cargo_test] -fn bench_test() { - full_project() - .cargo("bench -v --test 'te*1'") - .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] `[..]test1[..] --bench` -", - ) - .run(); -} - -#[cargo_test] -fn install_example() { - full_project() - .cargo("install --path . --example 'ex*1'") - .with_stderr( - "\ -[INSTALLING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..]/home/.cargo/bin/example1[EXE] -[INSTALLED] package `foo v0.0.1 ([CWD])` (executable `example1[EXE]`) -[WARNING] be sure to add [..] -", - ) - .run(); -} - -#[cargo_test] -fn install_bin() { - full_project() - .cargo("install --path . --bin 'bi*1'") - .with_stderr( - "\ -[INSTALLING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..]/home/.cargo/bin/bin1[EXE] -[INSTALLED] package `foo v0.0.1 ([CWD])` (executable `bin1[EXE]`) -[WARNING] be sure to add [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustdoc_example() { - full_project() - .cargo("rustdoc -v --example 'ex*1'") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[RUNNING] `rustdoc --crate-type bin --crate-name example1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustdoc_bin() { - full_project() - .cargo("rustdoc -v --bin 'bi*1'") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[RUNNING] `rustdoc --crate-type bin --crate-name bin1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustdoc_bench() { - full_project() - .cargo("rustdoc -v --bench 'be*1'") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[RUNNING] `rustdoc --crate-type bin --crate-name bench1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustdoc_test() { - full_project() - .cargo("rustdoc -v --test 'te*1'") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[RUNNING] `rustdoc --crate-type bin --crate-name test1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn rustc_example() { - full_project() - .cargo("rustc -v --example 'ex*1'") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name example1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustc_bin() { - full_project() - .cargo("rustc -v --bin 'bi*1'") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name bin1 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustc_bench() { - full_project() - .cargo("rustc -v --bench 'be*1'") - .with_stderr_contains("[RUNNING] `rustc --crate-name bench1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustc_test() { - full_project() - .cargo("rustc -v --test 'te*1'") - .with_stderr_contains("[RUNNING] `rustc --crate-name test1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin2 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name bin1 [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name foo [..]`") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[RUNNING] `rustc --crate-name [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -fn full_project() -> Project { - project() - .file("examples/example1.rs", "fn main() { }") - .file("examples/example2.rs", "fn main() { }") - .file("benches/bench1.rs", "") - .file("benches/bench2.rs", "") - .file("tests/test1.rs", "") - .file("tests/test2.rs", "") - .file("src/main.rs", "fn main() { }") - .file("src/bin/bin1.rs", "fn main() { }") - .file("src/bin/bin2.rs", "fn main() { }") - .build() -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/help.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/help.rs deleted file mode 100644 index fdb527e76..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/help.rs +++ /dev/null @@ -1,219 +0,0 @@ -//! Tests for cargo's help output. - -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_manifest, cargo_exe, cargo_process, paths, process, project}; -use std::fs; -use std::path::Path; -use std::str::from_utf8; - -#[cargo_test] -fn help() { - cargo_process("").run(); - cargo_process("help").run(); - cargo_process("-h").run(); - cargo_process("help build").run(); - cargo_process("build -h").run(); - cargo_process("help help").run(); - // Ensure that help output goes to stdout, not stderr. - cargo_process("search --help").with_stderr("").run(); - cargo_process("search --help") - .with_stdout_contains("[..] --frozen [..]") - .run(); -} - -#[cargo_test] -fn help_external_subcommand() { - // Check that `help external-subcommand` forwards the --help flag to the - // given subcommand. 
- Package::new("cargo-fake-help", "1.0.0") - .file( - "src/main.rs", - r#" - fn main() { - if ::std::env::args().nth(2) == Some(String::from("--help")) { - println!("fancy help output"); - } - } - "#, - ) - .publish(); - cargo_process("install cargo-fake-help").run(); - cargo_process("help fake-help") - .with_stdout("fancy help output\n") - .run(); -} - -#[cargo_test] -fn z_flags_help() { - // Test that the output of `cargo -Z help` shows a different help screen with - // all the `-Z` flags. - cargo_process("-Z help") - .with_stdout_contains( - " -Z allow-features[..]-- Allow *only* the listed unstable features", - ) - .run(); -} - -fn help_with_man(display_command: &str) { - // Build a "man" process that just echoes the contents. - let p = project() - .at(display_command) - .file("Cargo.toml", &basic_manifest(display_command, "1.0.0")) - .file( - "src/main.rs", - &r#" - fn main() { - eprintln!("custom __COMMAND__"); - let path = std::env::args().skip(1).next().unwrap(); - let mut f = std::fs::File::open(path).unwrap(); - std::io::copy(&mut f, &mut std::io::stdout()).unwrap(); - } - "# - .replace("__COMMAND__", display_command), - ) - .build(); - p.cargo("build").run(); - - help_with_man_and_path(display_command, "build", "build", &p.target_debug_dir()); -} - -fn help_with_man_and_path( - display_command: &str, - subcommand: &str, - actual_subcommand: &str, - path: &Path, -) { - let contents = if display_command == "man" { - fs::read_to_string(format!("src/etc/man/cargo-{}.1", actual_subcommand)).unwrap() - } else { - fs::read_to_string(format!( - "src/doc/man/generated_txt/cargo-{}.txt", - actual_subcommand - )) - .unwrap() - }; - - let output = process(&cargo_exe()) - .arg("help") - .arg(subcommand) - .env("PATH", path) - .exec_with_output() - .unwrap(); - assert!(output.status.success()); - let stderr = from_utf8(&output.stderr).unwrap(); - if display_command.is_empty() { - assert_eq!(stderr, ""); - } else { - assert_eq!(stderr, format!("custom {}\n", display_command)); - } - let stdout = from_utf8(&output.stdout).unwrap(); - assert_eq!(stdout, contents); -} - -fn help_with_stdout_and_path(subcommand: &str, path: &Path) -> String { - let output = process(&cargo_exe()) - .arg("help") - .arg(subcommand) - .env("PATH", path) - .exec_with_output() - .unwrap(); - assert!(output.status.success()); - let stderr = from_utf8(&output.stderr).unwrap(); - assert_eq!(stderr, ""); - let stdout = from_utf8(&output.stdout).unwrap(); - stdout.to_string() -} - -#[cargo_test] -fn help_man() { - // Checks that `help command` displays the man page using the given command. - help_with_man("man"); - help_with_man("less"); - help_with_man("more"); - - // Check with no commands in PATH. - help_with_man_and_path("", "build", "build", Path::new("")); -} - -#[cargo_test] -fn help_alias() { - // Check that `help some_alias` will resolve. - help_with_man_and_path("", "b", "build", Path::new("")); - - let config = paths::root().join(".cargo/config"); - fs::create_dir_all(config.parent().unwrap()).unwrap(); - fs::write( - config, - r#" - [alias] - empty-alias = "" - simple-alias = "build" - complex-alias = ["build", "--release"] - "#, - ) - .unwrap(); - - // The `empty-alias` returns an error. - cargo_process("help empty-alias") - .env("PATH", Path::new("")) - .with_stderr_contains("[..]The subcommand 'empty-alias' wasn't recognized[..]") - .run_expect_error(); - - // Because `simple-alias` aliases a subcommand with no arguments, help shows the manpage. 
- help_with_man_and_path("", "simple-alias", "build", Path::new("")); - - // Help for `complex-alias` displays the full alias command. - let out = help_with_stdout_and_path("complex-alias", Path::new("")); - assert_eq!(out, "`complex-alias` is aliased to `build --release`\n"); -} - -#[cargo_test] -fn alias_z_flag_help() { - cargo_process("build -Z help") - .with_stdout_contains( - " -Z allow-features[..]-- Allow *only* the listed unstable features", - ) - .run(); - - cargo_process("run -Z help") - .with_stdout_contains( - " -Z allow-features[..]-- Allow *only* the listed unstable features", - ) - .run(); - - cargo_process("check -Z help") - .with_stdout_contains( - " -Z allow-features[..]-- Allow *only* the listed unstable features", - ) - .run(); - - cargo_process("test -Z help") - .with_stdout_contains( - " -Z allow-features[..]-- Allow *only* the listed unstable features", - ) - .run(); - - cargo_process("b -Z help") - .with_stdout_contains( - " -Z allow-features[..]-- Allow *only* the listed unstable features", - ) - .run(); - - cargo_process("r -Z help") - .with_stdout_contains( - " -Z allow-features[..]-- Allow *only* the listed unstable features", - ) - .run(); - - cargo_process("c -Z help") - .with_stdout_contains( - " -Z allow-features[..]-- Allow *only* the listed unstable features", - ) - .run(); - - cargo_process("t -Z help") - .with_stdout_contains( - " -Z allow-features[..]-- Allow *only* the listed unstable features", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/init.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/init.rs deleted file mode 100644 index c32167039..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/init.rs +++ /dev/null @@ -1,673 +0,0 @@ -//! Tests for the `cargo init` command. 
- -use cargo_test_support::{command_is_available, paths, Execs}; -use std::env; -use std::fs; -use std::process::Command; - -fn cargo_process(s: &str) -> Execs { - let mut execs = cargo_test_support::cargo_process(s); - execs.cwd(&paths::root()).env("HOME", &paths::home()); - execs -} - -fn mercurial_available() -> bool { - let result = Command::new("hg") - .arg("--version") - .output() - .map(|o| o.status.success()) - .unwrap_or(false); - if !result { - println!("`hg` not available, skipping test"); - } - result -} - -#[cargo_test] -fn simple_lib() { - cargo_process("init --lib --vcs none --edition 2015") - .with_stderr("[CREATED] library package") - .run(); - - assert!(paths::root().join("Cargo.toml").is_file()); - assert!(paths::root().join("src/lib.rs").is_file()); - assert!(!paths::root().join(".gitignore").is_file()); - - cargo_process("build").run(); -} - -#[cargo_test] -fn simple_bin() { - let path = paths::root().join("foo"); - fs::create_dir(&path).unwrap(); - cargo_process("init --bin --vcs none --edition 2015") - .cwd(&path) - .with_stderr("[CREATED] binary (application) package") - .run(); - - assert!(paths::root().join("foo/Cargo.toml").is_file()); - assert!(paths::root().join("foo/src/main.rs").is_file()); - - cargo_process("build").cwd(&path).run(); - assert!(paths::root() - .join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX)) - .is_file()); -} - -#[cargo_test] -fn simple_git_ignore_exists() { - // write a .gitignore file with two entries - fs::create_dir_all(paths::root().join("foo")).unwrap(); - fs::write( - paths::root().join("foo/.gitignore"), - "/target\n**/some.file", - ) - .unwrap(); - - cargo_process("init --lib foo --edition 2015").run(); - - assert!(paths::root().is_dir()); - assert!(paths::root().join("foo/Cargo.toml").is_file()); - assert!(paths::root().join("foo/src/lib.rs").is_file()); - assert!(paths::root().join("foo/.git").is_dir()); - assert!(paths::root().join("foo/.gitignore").is_file()); - - let fp = paths::root().join("foo/.gitignore"); - let contents = fs::read_to_string(fp).unwrap(); - assert_eq!( - contents, - "/target\n\ - **/some.file\n\n\ - # Added by cargo\n\ - #\n\ - # already existing elements were commented out\n\ - \n\ - #/target\n\ - Cargo.lock\n", - ); - - cargo_process("build").cwd(&paths::root().join("foo")).run(); -} - -#[cargo_test] -fn git_ignore_exists_no_conflicting_entries() { - // write a .gitignore file with one entry - fs::create_dir_all(paths::root().join("foo")).unwrap(); - fs::write(paths::root().join("foo/.gitignore"), "**/some.file").unwrap(); - - cargo_process("init --lib foo --edition 2015").run(); - - let fp = paths::root().join("foo/.gitignore"); - let contents = fs::read_to_string(&fp).unwrap(); - assert_eq!( - contents, - "**/some.file\n\n\ - # Added by cargo\n\ - \n\ - /target\n\ - Cargo.lock\n", - ); -} - -#[cargo_test] -fn both_lib_and_bin() { - cargo_process("init --lib --bin") - .with_status(101) - .with_stderr("[ERROR] can't specify both lib and binary outputs") - .run(); -} - -fn bin_already_exists(explicit: bool, rellocation: &str) { - let path = paths::root().join("foo"); - fs::create_dir_all(&path.join("src")).unwrap(); - - let sourcefile_path = path.join(rellocation); - - let content = r#" - fn main() { - println!("Hello, world 2!"); - } - "#; - - fs::write(&sourcefile_path, content).unwrap(); - - if explicit { - cargo_process("init --bin --vcs none").cwd(&path).run(); - } else { - cargo_process("init --vcs none").cwd(&path).run(); - } - - 
assert!(paths::root().join("foo/Cargo.toml").is_file()); - assert!(!paths::root().join("foo/src/lib.rs").is_file()); - - // Check that our file is not overwritten - let new_content = fs::read_to_string(&sourcefile_path).unwrap(); - assert_eq!(content, new_content); -} - -#[cargo_test] -fn bin_already_exists_explicit() { - bin_already_exists(true, "src/main.rs") -} - -#[cargo_test] -fn bin_already_exists_implicit() { - bin_already_exists(false, "src/main.rs") -} - -#[cargo_test] -fn bin_already_exists_explicit_nosrc() { - bin_already_exists(true, "main.rs") -} - -#[cargo_test] -fn bin_already_exists_implicit_nosrc() { - bin_already_exists(false, "main.rs") -} - -#[cargo_test] -fn bin_already_exists_implicit_namenosrc() { - bin_already_exists(false, "foo.rs") -} - -#[cargo_test] -fn bin_already_exists_implicit_namesrc() { - bin_already_exists(false, "src/foo.rs") -} - -#[cargo_test] -fn confused_by_multiple_lib_files() { - let path = paths::root().join("foo"); - fs::create_dir_all(&path.join("src")).unwrap(); - - let path1 = path.join("src/lib.rs"); - fs::write(path1, r#"fn qqq () { println!("Hello, world 2!"); }"#).unwrap(); - - let path2 = path.join("lib.rs"); - fs::write(path2, r#" fn qqq () { println!("Hello, world 3!"); }"#).unwrap(); - - cargo_process("init --vcs none") - .cwd(&path) - .with_status(101) - .with_stderr( - "[ERROR] cannot have a package with multiple libraries, \ - found both `src/lib.rs` and `lib.rs`", - ) - .run(); - - assert!(!paths::root().join("foo/Cargo.toml").is_file()); -} - -#[cargo_test] -fn multibin_project_name_clash() { - let path = paths::root().join("foo"); - fs::create_dir(&path).unwrap(); - - let path1 = path.join("foo.rs"); - fs::write(path1, r#"fn main () { println!("Hello, world 2!"); }"#).unwrap(); - - let path2 = path.join("main.rs"); - fs::write(path2, r#"fn main () { println!("Hello, world 3!"); }"#).unwrap(); - - cargo_process("init --lib --vcs none") - .cwd(&path) - .with_status(101) - .with_stderr( - "\ -[ERROR] multiple possible binary sources found: - main.rs - foo.rs -cannot automatically generate Cargo.toml as the main target would be ambiguous -", - ) - .run(); - - assert!(!paths::root().join("foo/Cargo.toml").is_file()); -} - -fn lib_already_exists(rellocation: &str) { - let path = paths::root().join("foo"); - fs::create_dir_all(&path.join("src")).unwrap(); - - let sourcefile_path = path.join(rellocation); - - let content = "pub fn qqq() {}"; - fs::write(&sourcefile_path, content).unwrap(); - - cargo_process("init --vcs none").cwd(&path).run(); - - assert!(paths::root().join("foo/Cargo.toml").is_file()); - assert!(!paths::root().join("foo/src/main.rs").is_file()); - - // Check that our file is not overwritten - let new_content = fs::read_to_string(&sourcefile_path).unwrap(); - assert_eq!(content, new_content); -} - -#[cargo_test] -fn lib_already_exists_src() { - lib_already_exists("src/lib.rs"); -} - -#[cargo_test] -fn lib_already_exists_nosrc() { - lib_already_exists("lib.rs"); -} - -#[cargo_test] -fn simple_git() { - cargo_process("init --lib --vcs git").run(); - - assert!(paths::root().join("Cargo.toml").is_file()); - assert!(paths::root().join("src/lib.rs").is_file()); - assert!(paths::root().join(".git").is_dir()); - assert!(paths::root().join(".gitignore").is_file()); -} - -#[cargo_test] -fn auto_git() { - cargo_process("init --lib").run(); - - assert!(paths::root().join("Cargo.toml").is_file()); - assert!(paths::root().join("src/lib.rs").is_file()); - assert!(paths::root().join(".git").is_dir()); - 
assert!(paths::root().join(".gitignore").is_file()); -} - -#[cargo_test] -fn invalid_dir_name() { - let foo = &paths::root().join("foo.bar"); - fs::create_dir_all(&foo).unwrap(); - cargo_process("init") - .cwd(foo.clone()) - .with_status(101) - .with_stderr( - "\ -[ERROR] invalid character `.` in package name: `foo.bar`, [..] -If you need a package name to not match the directory name, consider using --name flag. -If you need a binary with the name \"foo.bar\", use a valid package name, \ -and set the binary name to be different from the package. \ -This can be done by setting the binary filename to `src/bin/foo.bar.rs` \ -or change the name in Cargo.toml with: - - [[bin]] - name = \"foo.bar\" - path = \"src/main.rs\" - -", - ) - .run(); - - assert!(!foo.join("Cargo.toml").is_file()); -} - -#[cargo_test] -fn reserved_name() { - let test = &paths::root().join("test"); - fs::create_dir_all(&test).unwrap(); - cargo_process("init") - .cwd(test.clone()) - .with_status(101) - .with_stderr( - "\ -[ERROR] the name `test` cannot be used as a package name, it conflicts [..]\n\ -If you need a package name to not match the directory name, consider using --name flag. -If you need a binary with the name \"test\", use a valid package name, \ -and set the binary name to be different from the package. \ -This can be done by setting the binary filename to `src/bin/test.rs` \ -or change the name in Cargo.toml with: - - [[bin]] - name = \"test\" - path = \"src/main.rs\" - -", - ) - .run(); - - assert!(!test.join("Cargo.toml").is_file()); -} - -#[cargo_test] -fn git_autodetect() { - fs::create_dir(&paths::root().join(".git")).unwrap(); - - cargo_process("init --lib").run(); - - assert!(paths::root().join("Cargo.toml").is_file()); - assert!(paths::root().join("src/lib.rs").is_file()); - assert!(paths::root().join(".git").is_dir()); - assert!(paths::root().join(".gitignore").is_file()); -} - -#[cargo_test] -fn mercurial_autodetect() { - fs::create_dir(&paths::root().join(".hg")).unwrap(); - - cargo_process("init --lib").run(); - - assert!(paths::root().join("Cargo.toml").is_file()); - assert!(paths::root().join("src/lib.rs").is_file()); - assert!(!paths::root().join(".git").is_dir()); - assert!(paths::root().join(".hgignore").is_file()); -} - -#[cargo_test] -fn gitignore_appended_not_replaced() { - fs::create_dir(&paths::root().join(".git")).unwrap(); - - fs::write(&paths::root().join(".gitignore"), "qqqqqq\n").unwrap(); - - cargo_process("init --lib").run(); - - assert!(paths::root().join("Cargo.toml").is_file()); - assert!(paths::root().join("src/lib.rs").is_file()); - assert!(paths::root().join(".git").is_dir()); - assert!(paths::root().join(".gitignore").is_file()); - - let contents = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); - assert!(contents.contains("qqqqqq")); -} - -#[cargo_test] -fn gitignore_added_newline_in_existing() { - fs::create_dir(&paths::root().join(".git")).unwrap(); - - fs::write(&paths::root().join(".gitignore"), "first").unwrap(); - - cargo_process("init --lib").run(); - - assert!(paths::root().join(".gitignore").is_file()); - - let contents = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); - assert!(contents.starts_with("first\n")); -} - -#[cargo_test] -fn gitignore_no_newline_in_new() { - fs::create_dir(&paths::root().join(".git")).unwrap(); - - cargo_process("init --lib").run(); - - assert!(paths::root().join(".gitignore").is_file()); - - let contents = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); - 
assert!(!contents.starts_with('\n')); -} - -#[cargo_test] -fn mercurial_added_newline_in_existing() { - fs::create_dir(&paths::root().join(".hg")).unwrap(); - - fs::write(&paths::root().join(".hgignore"), "first").unwrap(); - - cargo_process("init --lib").run(); - - assert!(paths::root().join(".hgignore").is_file()); - - let contents = fs::read_to_string(&paths::root().join(".hgignore")).unwrap(); - assert!(contents.starts_with("first\n")); -} - -#[cargo_test] -fn mercurial_no_newline_in_new() { - fs::create_dir(&paths::root().join(".hg")).unwrap(); - - cargo_process("init --lib").run(); - - assert!(paths::root().join(".hgignore").is_file()); - - let contents = fs::read_to_string(&paths::root().join(".hgignore")).unwrap(); - assert!(!contents.starts_with('\n')); -} - -#[cargo_test] -fn terminating_newline_in_new_git_ignore() { - cargo_process("init --vcs git --lib").run(); - - let content = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); - - let mut last_chars = content.chars().rev(); - assert_eq!(last_chars.next(), Some('\n')); - assert_ne!(last_chars.next(), Some('\n')); -} - -#[cargo_test] -fn terminating_newline_in_new_mercurial_ignore() { - if !mercurial_available() { - return; - } - cargo_process("init --vcs hg --lib").run(); - - let content = fs::read_to_string(&paths::root().join(".hgignore")).unwrap(); - - let mut last_chars = content.chars().rev(); - assert_eq!(last_chars.next(), Some('\n')); - assert_ne!(last_chars.next(), Some('\n')); -} - -#[cargo_test] -fn terminating_newline_in_existing_git_ignore() { - fs::create_dir(&paths::root().join(".git")).unwrap(); - fs::write(&paths::root().join(".gitignore"), b"first").unwrap(); - - cargo_process("init --lib").run(); - - let content = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); - - let mut last_chars = content.chars().rev(); - assert_eq!(last_chars.next(), Some('\n')); - assert_ne!(last_chars.next(), Some('\n')); -} - -#[cargo_test] -fn terminating_newline_in_existing_mercurial_ignore() { - fs::create_dir(&paths::root().join(".hg")).unwrap(); - fs::write(&paths::root().join(".hgignore"), b"first").unwrap(); - - cargo_process("init --lib").run(); - - let content = fs::read_to_string(&paths::root().join(".hgignore")).unwrap(); - - let mut last_chars = content.chars().rev(); - assert_eq!(last_chars.next(), Some('\n')); - assert_ne!(last_chars.next(), Some('\n')); -} - -#[cargo_test] -fn cargo_lock_gitignored_if_lib1() { - fs::create_dir(&paths::root().join(".git")).unwrap(); - - cargo_process("init --lib --vcs git").run(); - - assert!(paths::root().join(".gitignore").is_file()); - - let contents = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); - assert!(contents.contains(r#"Cargo.lock"#)); -} - -#[cargo_test] -fn cargo_lock_gitignored_if_lib2() { - fs::create_dir(&paths::root().join(".git")).unwrap(); - - fs::write(&paths::root().join("lib.rs"), "").unwrap(); - - cargo_process("init --vcs git").run(); - - assert!(paths::root().join(".gitignore").is_file()); - - let contents = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); - assert!(contents.contains(r#"Cargo.lock"#)); -} - -#[cargo_test] -fn cargo_lock_not_gitignored_if_bin1() { - fs::create_dir(&paths::root().join(".git")).unwrap(); - - cargo_process("init --vcs git --bin").run(); - - assert!(paths::root().join(".gitignore").is_file()); - - let contents = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); - assert!(!contents.contains(r#"Cargo.lock"#)); -} - -#[cargo_test] -fn 
cargo_lock_not_gitignored_if_bin2() { - fs::create_dir(&paths::root().join(".git")).unwrap(); - - fs::write(&paths::root().join("main.rs"), "").unwrap(); - - cargo_process("init --vcs git").run(); - - assert!(paths::root().join(".gitignore").is_file()); - - let contents = fs::read_to_string(&paths::root().join(".gitignore")).unwrap(); - assert!(!contents.contains(r#"Cargo.lock"#)); -} - -#[cargo_test] -fn with_argument() { - cargo_process("init foo --vcs none").run(); - assert!(paths::root().join("foo/Cargo.toml").is_file()); -} - -#[cargo_test] -fn unknown_flags() { - cargo_process("init foo --flag") - .with_status(1) - .with_stderr_contains( - "error: Found argument '--flag' which wasn't expected, or isn't valid in this context", - ) - .run(); -} - -#[cfg(not(windows))] -#[cargo_test] -fn no_filename() { - cargo_process("init /") - .with_status(101) - .with_stderr( - "[ERROR] cannot auto-detect package name from path \"/\" ; use --name to override" - .to_string(), - ) - .run(); -} - -#[cargo_test] -fn formats_source() { - if !command_is_available("rustfmt") { - return; - } - - fs::write(&paths::root().join("rustfmt.toml"), "tab_spaces = 2").unwrap(); - - cargo_process("init --lib") - .with_stderr("[CREATED] library package") - .run(); - - assert_eq!( - fs::read_to_string(paths::root().join("src/lib.rs")).unwrap(), - r#"#[cfg(test)] -mod tests { - #[test] - fn it_works() { - let result = 2 + 2; - assert_eq!(result, 4); - } -} -"# - ); -} - -#[cargo_test] -fn ignores_failure_to_format_source() { - cargo_process("init --lib") - .env("PATH", "") // pretend that `rustfmt` is missing - .with_stderr("[CREATED] library package") - .run(); - - assert_eq!( - fs::read_to_string(paths::root().join("src/lib.rs")).unwrap(), - r#"#[cfg(test)] -mod tests { - #[test] - fn it_works() { - let result = 2 + 2; - assert_eq!(result, 4); - } -} -"# - ); -} - -#[cargo_test] -fn creates_binary_when_instructed_and_has_lib_file_no_warning() { - let path = paths::root().join("foo"); - fs::create_dir(&path).unwrap(); - fs::write(path.join("foo.rs"), "fn not_main() {}").unwrap(); - cargo_process("init --bin") - .cwd(&path) - .with_stderr( - "\ -[WARNING] file `foo.rs` seems to be a library file -[CREATED] binary (application) package -", - ) - .run(); - - let cargo_toml = fs::read_to_string(path.join("Cargo.toml")).unwrap(); - assert!(cargo_toml.contains("[[bin]]")); - assert!(!cargo_toml.contains("[lib]")); -} - -#[cargo_test] -fn creates_library_when_instructed_and_has_bin_file() { - let path = paths::root().join("foo"); - fs::create_dir(&path).unwrap(); - fs::write(path.join("foo.rs"), "fn main() {}").unwrap(); - cargo_process("init --lib") - .cwd(&path) - .with_stderr( - "\ -[WARNING] file `foo.rs` seems to be a binary (application) file -[CREATED] library package -", - ) - .run(); - - let cargo_toml = fs::read_to_string(path.join("Cargo.toml")).unwrap(); - assert!(!cargo_toml.contains("[[bin]]")); - assert!(cargo_toml.contains("[lib]")); -} - -#[cargo_test] -fn creates_binary_when_both_binlib_present() { - let path = paths::root().join("foo"); - fs::create_dir(&path).unwrap(); - fs::write(path.join("foo.rs"), "fn main() {}").unwrap(); - fs::write(path.join("lib.rs"), "fn notmain() {}").unwrap(); - cargo_process("init --bin") - .cwd(&path) - .with_stderr("[CREATED] binary (application) package") - .run(); - - let cargo_toml = fs::read_to_string(path.join("Cargo.toml")).unwrap(); - assert!(cargo_toml.contains("[[bin]]")); - assert!(cargo_toml.contains("[lib]")); -} - -#[cargo_test] -fn 
cant_create_library_when_both_binlib_present() { - let path = paths::root().join("foo"); - fs::create_dir(&path).unwrap(); - fs::write(path.join("foo.rs"), "fn main() {}").unwrap(); - fs::write(path.join("lib.rs"), "fn notmain() {}").unwrap(); - cargo_process("init --lib") - .cwd(&path) - .with_status(101) - .with_stderr( - "[ERROR] cannot have a package with multiple libraries, found both `foo.rs` and `lib.rs`" - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/install.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/install.rs deleted file mode 100644 index b28b3743c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/install.rs +++ /dev/null @@ -1,1917 +0,0 @@ -//! Tests for the `cargo install` command. - -use std::fs::{self, OpenOptions}; -use std::io::prelude::*; - -use cargo_test_support::cross_compile; -use cargo_test_support::git; -use cargo_test_support::registry::{self, registry_path, registry_url, Package}; -use cargo_test_support::{ - basic_manifest, cargo_process, no_such_file_err_msg, project, symlink_supported, t, -}; - -use cargo_test_support::install::{ - assert_has_installed_exe, assert_has_not_installed_exe, cargo_home, -}; -use cargo_test_support::paths::{self, CargoPathExt}; -use std::env; -use std::path::PathBuf; - -fn pkg(name: &str, vers: &str) { - Package::new(name, vers) - .file("src/lib.rs", "") - .file( - "src/main.rs", - &format!("extern crate {}; fn main() {{}}", name), - ) - .publish(); -} - -#[cargo_test] -fn simple() { - pkg("foo", "0.0.1"); - - cargo_process("install foo") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] foo v0.0.1 (registry [..]) -[INSTALLING] foo v0.0.1 -[COMPILING] foo v0.0.1 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] -[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .run(); - assert_has_installed_exe(cargo_home(), "foo"); - - cargo_process("uninstall foo") - .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]") - .run(); - assert_has_not_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn simple_with_message_format() { - pkg("foo", "0.0.1"); - - cargo_process("install foo --message-format=json") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] foo v0.0.1 (registry [..]) -[INSTALLING] foo v0.0.1 -[COMPILING] foo v0.0.1 -[FINISHED] release [optimized] target(s) in [..] 
-[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] -[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .with_json( - r#" - { - "reason": "compiler-artifact", - "package_id": "foo 0.0.1 ([..])", - "manifest_path": "[..]", - "target": { - "kind": [ - "lib" - ], - "crate_types": [ - "lib" - ], - "name": "foo", - "src_path": "[..]/foo-0.0.1/src/lib.rs", - "edition": "2015", - "doc": true, - "doctest": true, - "test": true - }, - "profile": "{...}", - "features": [], - "filenames": "{...}", - "executable": null, - "fresh": false - } - - { - "reason": "compiler-artifact", - "package_id": "foo 0.0.1 ([..])", - "manifest_path": "[..]", - "target": { - "kind": [ - "bin" - ], - "crate_types": [ - "bin" - ], - "name": "foo", - "src_path": "[..]/foo-0.0.1/src/main.rs", - "edition": "2015", - "doc": true, - "doctest": false, - "test": true - }, - "profile": "{...}", - "features": [], - "filenames": "{...}", - "executable": "[..]", - "fresh": false - } - - {"reason":"build-finished","success":true} - "#, - ) - .run(); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn with_index() { - pkg("foo", "0.0.1"); - - cargo_process("install foo --index") - .arg(registry_url().to_string()) - .with_stderr(&format!( - "\ -[UPDATING] `{reg}` index -[DOWNLOADING] crates ... -[DOWNLOADED] foo v0.0.1 (registry `{reg}`) -[INSTALLING] foo v0.0.1 (registry `{reg}`) -[COMPILING] foo v0.0.1 (registry `{reg}`) -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] -[INSTALLED] package `foo v0.0.1 (registry `{reg}`)` (executable `foo[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - reg = registry_path().to_str().unwrap() - )) - .run(); - assert_has_installed_exe(cargo_home(), "foo"); - - cargo_process("uninstall foo") - .with_stderr("[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]") - .run(); - assert_has_not_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn multiple_pkgs() { - pkg("foo", "0.0.1"); - pkg("bar", "0.0.2"); - - cargo_process("install foo bar baz") - .with_status(101) - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] foo v0.0.1 (registry `dummy-registry`) -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.0.2 (registry `dummy-registry`) -[ERROR] could not find `baz` in registry `[..]` with version `*` -[INSTALLING] foo v0.0.1 -[COMPILING] foo v0.0.1 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] -[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`) -[INSTALLING] bar v0.0.2 -[COMPILING] bar v0.0.2 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE] -[INSTALLED] package `bar v0.0.2` (executable `bar[EXE]`) -[SUMMARY] Successfully installed foo, bar! Failed to install baz (see error(s) above). -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -[ERROR] some crates failed to install -", - ) - .run(); - assert_has_installed_exe(cargo_home(), "foo"); - assert_has_installed_exe(cargo_home(), "bar"); - - cargo_process("uninstall foo bar") - .with_stderr( - "\ -[REMOVING] [CWD]/home/.cargo/bin/foo[EXE] -[REMOVING] [CWD]/home/.cargo/bin/bar[EXE] -[SUMMARY] Successfully uninstalled foo, bar! 
-",
-        )
-        .run();
-
-    assert_has_not_installed_exe(cargo_home(), "foo");
-    assert_has_not_installed_exe(cargo_home(), "bar");
-}
-
-fn path() -> Vec<PathBuf> {
-    env::split_paths(&env::var_os("PATH").unwrap_or_default()).collect()
-}
-
-#[cargo_test]
-fn multiple_pkgs_path_set() {
-    // confirm partial failure results in 101 status code and does not have the
-    // '[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries'
-    // even if CARGO_HOME/bin is in the PATH
-    pkg("foo", "0.0.1");
-    pkg("bar", "0.0.2");
-
-    // add CARGO_HOME/bin to path
-    let mut path = path();
-    path.push(cargo_home().join("bin"));
-    let new_path = env::join_paths(path).unwrap();
-    cargo_process("install foo bar baz")
-        .env("PATH", new_path)
-        .with_status(101)
-        .with_stderr(
-            "\
-[UPDATING] `[..]` index
-[DOWNLOADING] crates ...
-[DOWNLOADED] foo v0.0.1 (registry `dummy-registry`)
-[DOWNLOADING] crates ...
-[DOWNLOADED] bar v0.0.2 (registry `dummy-registry`)
-[ERROR] could not find `baz` in registry `[..]` with version `*`
-[INSTALLING] foo v0.0.1
-[COMPILING] foo v0.0.1
-[FINISHED] release [optimized] target(s) in [..]
-[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
-[INSTALLED] package `foo v0.0.1` (executable `foo[EXE]`)
-[INSTALLING] bar v0.0.2
-[COMPILING] bar v0.0.2
-[FINISHED] release [optimized] target(s) in [..]
-[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE]
-[INSTALLED] package `bar v0.0.2` (executable `bar[EXE]`)
-[SUMMARY] Successfully installed foo, bar! Failed to install baz (see error(s) above).
-[ERROR] some crates failed to install
-",
-        )
-        .run();
-    assert_has_installed_exe(cargo_home(), "foo");
-    assert_has_installed_exe(cargo_home(), "bar");
-
-    cargo_process("uninstall foo bar")
-        .with_stderr(
-            "\
-[REMOVING] [CWD]/home/.cargo/bin/foo[EXE]
-[REMOVING] [CWD]/home/.cargo/bin/bar[EXE]
-[SUMMARY] Successfully uninstalled foo, bar!
-",
-        )
-        .run();
-
-    assert_has_not_installed_exe(cargo_home(), "foo");
-    assert_has_not_installed_exe(cargo_home(), "bar");
-}
-
-#[cargo_test]
-fn pick_max_version() {
-    pkg("foo", "0.1.0");
-    pkg("foo", "0.2.0");
-    pkg("foo", "0.2.1");
-    pkg("foo", "0.2.1-pre.1");
-    pkg("foo", "0.3.0-pre.2");
-
-    cargo_process("install foo")
-        .with_stderr(
-            "\
-[UPDATING] `[..]` index
-[DOWNLOADING] crates ...
-[DOWNLOADED] foo v0.2.1 (registry [..])
-[INSTALLING] foo v0.2.1
-[COMPILING] foo v0.2.1
-[FINISHED] release [optimized] target(s) in [..]
-[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
-[INSTALLED] package `foo v0.2.1` (executable `foo[EXE]`)
-[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries
-",
-        )
-        .run();
-    assert_has_installed_exe(cargo_home(), "foo");
-}
-
-#[cargo_test]
-fn installs_beta_version_by_explicit_name_from_git() {
-    let p = git::repo(&paths::root().join("foo"))
-        .file("Cargo.toml", &basic_manifest("foo", "0.3.0-beta.1"))
-        .file("src/main.rs", "fn main() {}")
-        .build();
-
-    cargo_process("install --git")
-        .arg(p.url().to_string())
-        .arg("foo")
-        .run();
-    assert_has_installed_exe(cargo_home(), "foo");
-}
-
-#[cargo_test]
-fn missing() {
-    pkg("foo", "0.0.1");
-    cargo_process("install bar")
-        .with_status(101)
-        .with_stderr(
-            "\
-[UPDATING] [..] index
-[ERROR] could not find `bar` in registry `[..]` with version `*`
-",
-        )
-        .run();
-}
-
-#[cargo_test]
-fn missing_current_working_directory() {
-    cargo_process("install .")
-        .with_status(101)
-        .with_stderr(
-            "\
-error: To install the binaries for the package in current working \
-directory use `cargo install --path .`. 
Use `cargo build` if you \ -want to simply build the package. -", - ) - .run(); -} - -#[cargo_test] -fn bad_version() { - pkg("foo", "0.0.1"); - cargo_process("install foo --vers=0.2.0") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -[ERROR] could not find `foo` in registry `[..]` with version `=0.2.0` -", - ) - .run(); -} - -#[cargo_test] -fn bad_paths() { - cargo_process("install") - .with_status(101) - .with_stderr("[ERROR] `[CWD]` is not a crate root; specify a crate to install [..]") - .run(); - - cargo_process("install --path .") - .with_status(101) - .with_stderr("[ERROR] `[CWD]` does not contain a Cargo.toml file[..]") - .run(); - - let toml = paths::root().join("Cargo.toml"); - fs::write(toml, "").unwrap(); - cargo_process("install --path Cargo.toml") - .with_status(101) - .with_stderr("[ERROR] `[CWD]/Cargo.toml` is not a directory[..]") - .run(); - - cargo_process("install --path .") - .with_status(101) - .with_stderr_contains("[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`") - .run(); -} - -#[cargo_test] -fn install_location_precedence() { - pkg("foo", "0.0.1"); - - let root = paths::root(); - let t1 = root.join("t1"); - let t2 = root.join("t2"); - let t3 = root.join("t3"); - let t4 = cargo_home(); - - fs::create_dir(root.join(".cargo")).unwrap(); - fs::write( - root.join(".cargo/config"), - &format!( - "[install] - root = '{}' - ", - t3.display() - ), - ) - .unwrap(); - - println!("install --root"); - - cargo_process("install foo --root") - .arg(&t1) - .env("CARGO_INSTALL_ROOT", &t2) - .run(); - assert_has_installed_exe(&t1, "foo"); - assert_has_not_installed_exe(&t2, "foo"); - - println!("install CARGO_INSTALL_ROOT"); - - cargo_process("install foo") - .env("CARGO_INSTALL_ROOT", &t2) - .run(); - assert_has_installed_exe(&t2, "foo"); - assert_has_not_installed_exe(&t3, "foo"); - - println!("install install.root"); - - cargo_process("install foo").run(); - assert_has_installed_exe(&t3, "foo"); - assert_has_not_installed_exe(&t4, "foo"); - - fs::remove_file(root.join(".cargo/config")).unwrap(); - - println!("install cargo home"); - - cargo_process("install foo").run(); - assert_has_installed_exe(&t4, "foo"); -} - -#[cargo_test] -fn install_path() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - cargo_process("install --path").arg(p.root()).run(); - assert_has_installed_exe(cargo_home(), "foo"); - // path-style installs force a reinstall - p.cargo("install --path .") - .with_stderr( - "\ -[INSTALLING] foo v0.0.1 [..] -[FINISHED] release [..] -[REPLACING] [..]/.cargo/bin/foo[EXE] -[REPLACED] package `foo v0.0.1 [..]` with `foo v0.0.1 [..]` (executable `foo[EXE]`) -[WARNING] be sure to add [..] -", - ) - .run(); -} - -#[cargo_test] -fn install_target_dir() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - p.cargo("install --target-dir td_test") - .with_stderr( - "\ -[WARNING] Using `cargo install` [..] -[INSTALLING] foo v0.0.1 [..] -[COMPILING] foo v0.0.1 [..] -[FINISHED] release [..] -[INSTALLING] [..]foo[EXE] -[INSTALLED] package `foo v0.0.1 [..]foo[..]` (executable `foo[EXE]`) -[WARNING] be sure to add [..] 
-", - ) - .run(); - - let mut path = p.root(); - path.push("td_test"); - assert!(path.exists()); - - #[cfg(not(windows))] - path.push("release/foo"); - #[cfg(windows)] - path.push("release/foo.exe"); - assert!(path.exists()); -} - -#[cargo_test] -#[cfg(target_os = "linux")] -fn install_path_with_lowercase_cargo_toml() { - let toml = paths::root().join("cargo.toml"); - fs::write(toml, "").unwrap(); - - cargo_process("install --path .") - .with_status(101) - .with_stderr( - "\ -[ERROR] `[CWD]` does not contain a Cargo.toml file, \ -but found cargo.toml please try to rename it to Cargo.toml. --path must point to a directory containing a Cargo.toml file. -", - ) - .run(); -} - -#[cargo_test] -fn multiple_crates_error() { - let p = git::repo(&paths::root().join("foo")) - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/main.rs", "fn main() {}") - .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("a/src/main.rs", "fn main() {}") - .build(); - - cargo_process("install --git") - .arg(p.url().to_string()) - .with_status(101) - .with_stderr( - "\ -[UPDATING] git repository [..] -[ERROR] multiple packages with binaries found: bar, foo. \ -When installing a git repository, cargo will always search the entire repo for any Cargo.toml. \ -Please specify which to install. -", - ) - .run(); -} - -#[cargo_test] -fn multiple_crates_select() { - let p = git::repo(&paths::root().join("foo")) - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/main.rs", "fn main() {}") - .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("a/src/main.rs", "fn main() {}") - .build(); - - cargo_process("install --git") - .arg(p.url().to_string()) - .arg("foo") - .run(); - assert_has_installed_exe(cargo_home(), "foo"); - assert_has_not_installed_exe(cargo_home(), "bar"); - - cargo_process("install --git") - .arg(p.url().to_string()) - .arg("bar") - .run(); - assert_has_installed_exe(cargo_home(), "bar"); -} - -#[cargo_test] -fn multiple_crates_git_all() { - let p = git::repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bin1", "bin2"] - "#, - ) - .file("bin1/Cargo.toml", &basic_manifest("bin1", "0.1.0")) - .file("bin2/Cargo.toml", &basic_manifest("bin2", "0.1.0")) - .file( - "bin1/src/main.rs", - r#"fn main() { println!("Hello, world!"); }"#, - ) - .file( - "bin2/src/main.rs", - r#"fn main() { println!("Hello, world!"); }"#, - ) - .build(); - - cargo_process(&format!("install --git {} bin1 bin2", p.url().to_string())).run(); -} - -#[cargo_test] -fn multiple_crates_auto_binaries() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "a" } - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() {}") - .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("a/src/lib.rs", "") - .build(); - - cargo_process("install --path").arg(p.root()).run(); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn multiple_crates_auto_examples() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "a" } - "#, - ) - .file("src/lib.rs", "extern crate bar;") - .file( - "examples/foo.rs", - " - extern crate bar; - extern crate foo; - fn main() {} - ", - ) - .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("a/src/lib.rs", "") - .build(); - - cargo_process("install --path") - .arg(p.root()) - .arg("--example=foo") - 
.run(); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn no_binaries_or_examples() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "a" } - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("a/src/lib.rs", "") - .build(); - - cargo_process("install --path") - .arg(p.root()) - .with_status(101) - .with_stderr("[ERROR] no packages found with binaries or examples") - .run(); -} - -#[cargo_test] -fn no_binaries() { - let p = project() - .file("src/lib.rs", "") - .file("examples/foo.rs", "fn main() {}") - .build(); - - cargo_process("install --path") - .arg(p.root()) - .arg("foo") - .with_status(101) - .with_stderr( - "\ -[ERROR] there is nothing to install in `foo v0.0.1 ([..])`, because it has no binaries[..] -[..] -[..]", - ) - .run(); -} - -#[cargo_test] -fn examples() { - let p = project() - .file("src/lib.rs", "") - .file("examples/foo.rs", "extern crate foo; fn main() {}") - .build(); - - cargo_process("install --path") - .arg(p.root()) - .arg("--example=foo") - .run(); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn install_force() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - cargo_process("install --path").arg(p.root()).run(); - - let p = project() - .at("foo2") - .file("Cargo.toml", &basic_manifest("foo", "0.2.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - cargo_process("install --force --path") - .arg(p.root()) - .with_stderr( - "\ -[INSTALLING] foo v0.2.0 ([..]) -[COMPILING] foo v0.2.0 ([..]) -[FINISHED] release [optimized] target(s) in [..] -[REPLACING] [CWD]/home/.cargo/bin/foo[EXE] -[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .run(); - - cargo_process("install --list") - .with_stdout( - "\ -foo v0.2.0 ([..]): - foo[..] -", - ) - .run(); -} - -#[cargo_test] -fn install_force_partial_overlap() { - let p = project() - .file("src/bin/foo-bin1.rs", "fn main() {}") - .file("src/bin/foo-bin2.rs", "fn main() {}") - .build(); - - cargo_process("install --path").arg(p.root()).run(); - - let p = project() - .at("foo2") - .file("Cargo.toml", &basic_manifest("foo", "0.2.0")) - .file("src/bin/foo-bin2.rs", "fn main() {}") - .file("src/bin/foo-bin3.rs", "fn main() {}") - .build(); - - cargo_process("install --force --path") - .arg(p.root()) - .with_stderr( - "\ -[INSTALLING] foo v0.2.0 ([..]) -[COMPILING] foo v0.2.0 ([..]) -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [CWD]/home/.cargo/bin/foo-bin3[EXE] -[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE] -[REMOVING] executable `[..]/bin/foo-bin1[EXE]` from previous version foo v0.0.1 [..] -[INSTALLED] package `foo v0.2.0 ([..]/foo2)` (executable `foo-bin3[EXE]`) -[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo-bin2[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .run(); - - cargo_process("install --list") - .with_stdout( - "\ -foo v0.2.0 ([..]): - foo-bin2[..] - foo-bin3[..] 
-", - ) - .run(); -} - -#[cargo_test] -fn install_force_bin() { - let p = project() - .file("src/bin/foo-bin1.rs", "fn main() {}") - .file("src/bin/foo-bin2.rs", "fn main() {}") - .build(); - - cargo_process("install --path").arg(p.root()).run(); - - let p = project() - .at("foo2") - .file("Cargo.toml", &basic_manifest("foo", "0.2.0")) - .file("src/bin/foo-bin1.rs", "fn main() {}") - .file("src/bin/foo-bin2.rs", "fn main() {}") - .build(); - - cargo_process("install --force --bin foo-bin2 --path") - .arg(p.root()) - .with_stderr( - "\ -[INSTALLING] foo v0.2.0 ([..]) -[COMPILING] foo v0.2.0 ([..]) -[FINISHED] release [optimized] target(s) in [..] -[REPLACING] [CWD]/home/.cargo/bin/foo-bin2[EXE] -[REPLACED] package `foo v0.0.1 ([..]/foo)` with `foo v0.2.0 ([..]/foo2)` (executable `foo-bin2[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .run(); - - cargo_process("install --list") - .with_stdout( - "\ -foo v0.0.1 ([..]): - foo-bin1[..] -foo v0.2.0 ([..]): - foo-bin2[..] -", - ) - .run(); -} - -#[cargo_test] -fn compile_failure() { - let p = project().file("src/main.rs", "").build(); - - cargo_process("install --path") - .arg(p.root()) - .with_status(101) - .with_stderr_contains( - "\ -[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be \ - found at `[..]target` - -Caused by: - could not compile `foo` due to previous error -", - ) - .run(); -} - -#[cargo_test] -fn git_repo() { - let p = git::repo(&paths::root().join("foo")) - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - // Use `--locked` to test that we don't even try to write a lock file. - cargo_process("install --locked --git") - .arg(p.url().to_string()) - .with_stderr( - "\ -[UPDATING] git repository `[..]` -[WARNING] no Cargo.lock file published in foo v0.1.0 ([..]) -[INSTALLING] foo v0.1.0 ([..]) -[COMPILING] foo v0.1.0 ([..]) -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE] -[INSTALLED] package `foo v0.1.0 ([..]/foo#[..])` (executable `foo[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .run(); - assert_has_installed_exe(cargo_home(), "foo"); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -#[cfg(target_os = "linux")] -fn git_repo_with_lowercase_cargo_toml() { - let p = git::repo(&paths::root().join("foo")) - .file("cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - cargo_process("install --git") - .arg(p.url().to_string()) - .with_status(101) - .with_stderr( - "\ -[UPDATING] git repository [..] -[ERROR] Could not find Cargo.toml in `[..]`, but found cargo.toml please try to rename it to Cargo.toml -", - ) - .run(); -} - -#[cargo_test] -fn list() { - pkg("foo", "0.0.1"); - pkg("bar", "0.2.1"); - pkg("bar", "0.2.2"); - - cargo_process("install --list").with_stdout("").run(); - - cargo_process("install bar --vers =0.2.1").run(); - cargo_process("install foo").run(); - cargo_process("install --list") - .with_stdout( - "\ -bar v0.2.1: - bar[..] -foo v0.0.1: - foo[..] -", - ) - .run(); -} - -#[cargo_test] -fn list_error() { - pkg("foo", "0.0.1"); - cargo_process("install foo").run(); - cargo_process("install --list") - .with_stdout( - "\ -foo v0.0.1: - foo[..] 
-", - ) - .run(); - let mut worldfile_path = cargo_home(); - worldfile_path.push(".crates.toml"); - let mut worldfile = OpenOptions::new() - .write(true) - .open(worldfile_path) - .expect(".crates.toml should be there"); - worldfile.write_all(b"\x00").unwrap(); - drop(worldfile); - cargo_process("install --list --verbose") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse crate metadata at `[..]` - -Caused by: - invalid TOML found for metadata - -Caused by: - unexpected character[..] -", - ) - .run(); -} - -#[cargo_test] -fn uninstall_pkg_does_not_exist() { - cargo_process("uninstall foo") - .with_status(101) - .with_stderr("[ERROR] package ID specification `foo` did not match any packages") - .run(); -} - -#[cargo_test] -fn uninstall_bin_does_not_exist() { - pkg("foo", "0.0.1"); - - cargo_process("install foo").run(); - cargo_process("uninstall foo --bin=bar") - .with_status(101) - .with_stderr("[ERROR] binary `bar[..]` not installed as part of `foo v0.0.1`") - .run(); -} - -#[cargo_test] -fn uninstall_piecemeal() { - let p = project() - .file("src/bin/foo.rs", "fn main() {}") - .file("src/bin/bar.rs", "fn main() {}") - .build(); - - cargo_process("install --path").arg(p.root()).run(); - assert_has_installed_exe(cargo_home(), "foo"); - assert_has_installed_exe(cargo_home(), "bar"); - - cargo_process("uninstall foo --bin=bar") - .with_stderr("[REMOVING] [..]bar[..]") - .run(); - - assert_has_installed_exe(cargo_home(), "foo"); - assert_has_not_installed_exe(cargo_home(), "bar"); - - cargo_process("uninstall foo --bin=foo") - .with_stderr("[REMOVING] [..]foo[..]") - .run(); - assert_has_not_installed_exe(cargo_home(), "foo"); - - cargo_process("uninstall foo") - .with_status(101) - .with_stderr("[ERROR] package ID specification `foo` did not match any packages") - .run(); -} - -#[cargo_test] -fn subcommand_works_out_of_the_box() { - Package::new("cargo-foo", "1.0.0") - .file("src/main.rs", r#"fn main() { println!("bar"); }"#) - .publish(); - cargo_process("install cargo-foo").run(); - cargo_process("foo").with_stdout("bar\n").run(); - cargo_process("--list") - .with_stdout_contains(" foo\n") - .run(); -} - -#[cargo_test] -fn installs_from_cwd_by_default() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - p.cargo("install") - .with_stderr_contains( - "warning: Using `cargo install` to install the binaries for the \ - package in current working directory is deprecated, \ - use `cargo install --path .` instead. \ - Use `cargo build` if you want to simply build the package.", - ) - .run(); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn installs_from_cwd_with_2018_warnings() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - edition = "2018" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("install") - .with_status(101) - .with_stderr_contains( - "error: Using `cargo install` to install the binaries for the \ - package in current working directory is no longer supported, \ - use `cargo install --path .` instead. \ - Use `cargo build` if you want to simply build the package.", - ) - .run(); - assert_has_not_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn uninstall_cwd() { - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("install --path .") - .with_stderr(&format!( - "\ -[INSTALLING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] release [optimized] target(s) in [..] 
-[INSTALLING] {home}/bin/foo[EXE] -[INSTALLED] package `foo v0.0.1 ([..]/foo)` (executable `foo[EXE]`) -[WARNING] be sure to add `{home}/bin` to your PATH to be able to run the installed binaries", - home = cargo_home().display(), - )) - .run(); - assert_has_installed_exe(cargo_home(), "foo"); - - p.cargo("uninstall") - .with_stdout("") - .with_stderr(&format!( - "[REMOVING] {home}/bin/foo[EXE]", - home = cargo_home().display() - )) - .run(); - assert_has_not_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn uninstall_cwd_not_installed() { - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("uninstall") - .with_status(101) - .with_stdout("") - .with_stderr("error: package `foo v0.0.1 ([CWD])` is not installed") - .run(); -} - -#[cargo_test] -fn uninstall_cwd_no_project() { - cargo_process("uninstall") - .with_status(101) - .with_stdout("") - .with_stderr(format!( - "\ -[ERROR] failed to read `[CWD]/Cargo.toml` - -Caused by: - {err_msg}", - err_msg = no_such_file_err_msg(), - )) - .run(); -} - -#[cargo_test] -fn do_not_rebuilds_on_local_install() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - p.cargo("build --release").run(); - cargo_process("install --path") - .arg(p.root()) - .with_stderr( - "\ -[INSTALLING] [..] -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..] -[INSTALLED] package `foo v0.0.1 ([..]/foo)` (executable `foo[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .run(); - - assert!(p.build_dir().exists()); - assert!(p.release_bin("foo").exists()); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn reports_unsuccessful_subcommand_result() { - Package::new("cargo-fail", "1.0.0") - .file("src/main.rs", "fn main() { panic!(); }") - .publish(); - cargo_process("install cargo-fail").run(); - cargo_process("--list") - .with_stdout_contains(" fail\n") - .run(); - cargo_process("fail") - .with_status(101) - .with_stderr_contains("thread '[..]' panicked at 'explicit panic', [..]") - .run(); -} - -#[cargo_test] -fn git_with_lockfile() { - let p = git::repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "fn main() {}") - .file( - "Cargo.lock", - r#" - [[package]] - name = "foo" - version = "0.1.0" - dependencies = [ "bar 0.1.0" ] - - [[package]] - name = "bar" - version = "0.1.0" - "#, - ) - .build(); - - cargo_process("install --git") - .arg(p.url().to_string()) - .run(); -} - -#[cargo_test] -fn q_silences_warnings() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - cargo_process("install -q --path") - .arg(p.root()) - .with_stderr("") - .run(); -} - -#[cargo_test] -fn readonly_dir() { - pkg("foo", "0.0.1"); - - let root = paths::root(); - let dir = &root.join("readonly"); - fs::create_dir(root.join("readonly")).unwrap(); - let mut perms = fs::metadata(dir).unwrap().permissions(); - perms.set_readonly(true); - fs::set_permissions(dir, perms).unwrap(); - - cargo_process("install foo").cwd(dir).run(); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn use_path_workspace() { - Package::new("foo", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [workspace] - members = 
["baz"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "baz/Cargo.toml", - r#" - [package] - name = "baz" - version = "0.1.0" - authors = [] - - [dependencies] - foo = "1" - "#, - ) - .file("baz/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - let lock = p.read_lockfile(); - p.cargo("install").run(); - let lock2 = p.read_lockfile(); - assert_eq!(lock, lock2, "different lockfiles"); -} - -#[cargo_test] -fn dev_dependencies_no_check() { - Package::new("foo", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dev-dependencies] - baz = "1.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..] no matching package named `baz` found") - .run(); - p.cargo("install").run(); -} - -#[cargo_test] -fn dev_dependencies_lock_file_untouched() { - Package::new("foo", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dev-dependencies] - bar = { path = "a" } - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("a/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - let lock = p.read_lockfile(); - p.cargo("install").run(); - let lock2 = p.read_lockfile(); - assert!(lock == lock2, "different lockfiles"); -} - -#[cargo_test] -fn install_target_native() { - pkg("foo", "0.1.0"); - - cargo_process("install foo --target") - .arg(cargo_test_support::rustc_host()) - .run(); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn install_target_foreign() { - if cross_compile::disabled() { - return; - } - - pkg("foo", "0.1.0"); - - cargo_process("install foo --target") - .arg(cross_compile::alternate()) - .run(); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn vers_precise() { - pkg("foo", "0.1.1"); - pkg("foo", "0.1.2"); - - cargo_process("install foo --vers 0.1.1") - .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])") - .run(); -} - -#[cargo_test] -fn version_too() { - pkg("foo", "0.1.1"); - pkg("foo", "0.1.2"); - - cargo_process("install foo --version 0.1.1") - .with_stderr_contains("[DOWNLOADED] foo v0.1.1 (registry [..])") - .run(); -} - -#[cargo_test] -fn not_both_vers_and_version() { - pkg("foo", "0.1.1"); - pkg("foo", "0.1.2"); - - cargo_process("install foo --version 0.1.1 --vers 0.1.2") - .with_status(1) - .with_stderr_contains( - "\ -error: The argument '--version ' was provided more than once, \ -but cannot be used multiple times -", - ) - .run(); -} - -#[cargo_test] -fn test_install_git_cannot_be_a_base_url() { - cargo_process("install --git github.com:rust-lang/rustfmt.git") - .with_status(101) - .with_stderr( - "\ -[ERROR] invalid url `github.com:rust-lang/rustfmt.git`: cannot-be-a-base-URLs are not supported", - ) - .run(); -} - -#[cargo_test] -fn uninstall_multiple_and_specifying_bin() { - cargo_process("uninstall foo bar --bin baz") - .with_status(101) - .with_stderr("\ -[ERROR] A binary can only be associated with a single installed package, specifying multiple specs with --bin is redundant.") - .run(); -} - -#[cargo_test] -fn uninstall_with_empty_pakcage_option() { - cargo_process("uninstall -p") - .with_status(101) - .with_stderr( - "\ -[ERROR] \"--package \" requires a SPEC format value. -Run `cargo help pkgid` for more information about SPEC format. 
-", - ) - .run(); -} - -#[cargo_test] -fn uninstall_multiple_and_some_pkg_does_not_exist() { - pkg("foo", "0.0.1"); - - cargo_process("install foo").run(); - - cargo_process("uninstall foo bar") - .with_status(101) - .with_stderr( - "\ -[REMOVING] [CWD]/home/.cargo/bin/foo[EXE] -error: package ID specification `bar` did not match any packages -[SUMMARY] Successfully uninstalled foo! Failed to uninstall bar (see error(s) above). -error: some packages failed to uninstall -", - ) - .run(); - - assert_has_not_installed_exe(cargo_home(), "foo"); - assert_has_not_installed_exe(cargo_home(), "bar"); -} - -#[cargo_test] -fn custom_target_dir_for_git_source() { - let p = git::repo(&paths::root().join("foo")) - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - cargo_process("install --git") - .arg(p.url().to_string()) - .run(); - assert!(!paths::root().join("target/release").is_dir()); - - cargo_process("install --force --git") - .arg(p.url().to_string()) - .env("CARGO_TARGET_DIR", "target") - .run(); - assert!(paths::root().join("target/release").is_dir()); -} - -#[cargo_test] -fn install_respects_lock_file() { - // `cargo install` now requires --locked to use a Cargo.lock. - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.1.1") - .file("src/lib.rs", "not rust") - .publish(); - Package::new("foo", "0.1.0") - .dep("bar", "0.1") - .file("src/lib.rs", "") - .file( - "src/main.rs", - "extern crate foo; extern crate bar; fn main() {}", - ) - .file( - "Cargo.lock", - r#" -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "foo" -version = "0.1.0" -dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] -"#, - ) - .publish(); - - cargo_process("install foo") - .with_stderr_contains("[..]not rust[..]") - .with_status(101) - .run(); - cargo_process("install --locked foo").run(); -} - -#[cargo_test] -fn install_path_respects_lock_file() { - // --path version of install_path_respects_lock_file, --locked is required - // to use Cargo.lock. - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.1.1") - .file("src/lib.rs", "not rust") - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() {}") - .file( - "Cargo.lock", - r#" -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "foo" -version = "0.1.0" -dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] -"#, - ) - .build(); - - p.cargo("install --path .") - .with_stderr_contains("[..]not rust[..]") - .with_status(101) - .run(); - p.cargo("install --path . 
--locked").run(); -} - -#[cargo_test] -fn lock_file_path_deps_ok() { - Package::new("bar", "0.1.0").publish(); - - Package::new("foo", "0.1.0") - .dep("bar", "0.1") - .file("src/lib.rs", "") - .file( - "src/main.rs", - "extern crate foo; extern crate bar; fn main() {}", - ) - .file( - "Cargo.lock", - r#" - [[package]] - name = "bar" - version = "0.1.0" - - [[package]] - name = "foo" - version = "0.1.0" - dependencies = [ - "bar 0.1.0", - ] - "#, - ) - .publish(); - - cargo_process("install foo").run(); -} - -#[cargo_test] -fn install_empty_argument() { - // Bug 5229 - cargo_process("install") - .arg("") - .with_status(1) - .with_stderr_contains( - "[ERROR] The argument '...' requires a value but none was supplied", - ) - .run(); -} - -#[cargo_test] -fn git_repo_replace() { - let p = git::repo(&paths::root().join("foo")) - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/main.rs", "fn main() {}") - .build(); - let repo = git2::Repository::open(&p.root()).unwrap(); - let old_rev = repo.revparse_single("HEAD").unwrap().id(); - cargo_process("install --git") - .arg(p.url().to_string()) - .run(); - git::commit(&repo); - let new_rev = repo.revparse_single("HEAD").unwrap().id(); - let mut path = paths::home(); - path.push(".cargo/.crates.toml"); - - assert_ne!(old_rev, new_rev); - assert!(fs::read_to_string(path.clone()) - .unwrap() - .contains(&format!("{}", old_rev))); - cargo_process("install --force --git") - .arg(p.url().to_string()) - .run(); - assert!(fs::read_to_string(path) - .unwrap() - .contains(&format!("{}", new_rev))); -} - -#[cargo_test] -fn workspace_uses_workspace_target_dir() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - - [dependencies] - bar = { path = 'bar' } - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --release").cwd("bar").run(); - cargo_process("install --path") - .arg(p.root().join("bar")) - .with_stderr( - "[INSTALLING] [..] -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..] 
-[INSTALLED] package `bar v0.1.0 ([..]/bar)` (executable `bar[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .run(); -} - -#[cargo_test] -fn install_ignores_local_cargo_config() { - pkg("bar", "0.0.1"); - - let p = project() - .file( - ".cargo/config", - r#" - [build] - target = "non-existing-target" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("install bar").run(); - assert_has_installed_exe(cargo_home(), "bar"); -} - -#[cargo_test] -fn install_ignores_unstable_table_in_local_cargo_config() { - pkg("bar", "0.0.1"); - - let p = project() - .file( - ".cargo/config", - r#" - [unstable] - build-std = ["core"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("install bar").masquerade_as_nightly_cargo().run(); - assert_has_installed_exe(cargo_home(), "bar"); -} - -#[cargo_test] -fn install_global_cargo_config() { - pkg("bar", "0.0.1"); - - let config = cargo_home().join("config"); - let mut toml = fs::read_to_string(&config).unwrap_or_default(); - - toml.push_str( - r#" - [build] - target = 'nonexistent' - "#, - ); - fs::write(&config, toml).unwrap(); - - cargo_process("install bar") - .with_status(101) - .with_stderr_contains("[..]--target nonexistent[..]") - .run(); -} - -#[cargo_test] -fn install_path_config() { - project() - .file( - ".cargo/config", - r#" - [build] - target = 'nonexistent' - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - cargo_process("install --path foo") - .with_status(101) - .with_stderr_contains("[..]--target nonexistent[..]") - .run(); -} - -#[cargo_test] -fn install_version_req() { - // Try using a few versionreq styles. - pkg("foo", "0.0.3"); - pkg("foo", "1.0.4"); - pkg("foo", "1.0.5"); - cargo_process("install foo --version=*") - .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]") - .with_stderr_contains("[INSTALLING] foo v1.0.5") - .run(); - cargo_process("uninstall foo").run(); - cargo_process("install foo --version=^1.0") - .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]") - .with_stderr_contains("[INSTALLING] foo v1.0.5") - .run(); - cargo_process("uninstall foo").run(); - cargo_process("install foo --version=0.0.*") - .with_stderr_does_not_contain("[WARNING][..]is not a valid semver[..]") - .with_stderr_contains("[INSTALLING] foo v0.0.3") - .run(); -} - -#[cargo_test] -fn git_install_reads_workspace_manifest() { - let p = git::repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bin1"] - - [profile.release] - incremental = 3 - "#, - ) - .file("bin1/Cargo.toml", &basic_manifest("bin1", "0.1.0")) - .file( - "bin1/src/main.rs", - r#"fn main() { println!("Hello, world!"); }"#, - ) - .build(); - - cargo_process(&format!("install --git {}", p.url().to_string())) - .with_status(101) - .with_stderr_contains(" invalid type: integer `3`[..]") - .run(); -} - -#[cargo_test] -fn install_git_with_symlink_home() { - // Ensure that `cargo install` with a git repo is OK when CARGO_HOME is a - // symlink, and uses an build script. - if !symlink_supported() { - return; - } - let p = git::new("foo", |p| { - p.file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/main.rs", "fn main() {}") - // This triggers discover_git_and_list_files for detecting changed files. 
- .file("build.rs", "fn main() {}") - }); - #[cfg(unix)] - use std::os::unix::fs::symlink; - #[cfg(windows)] - use std::os::windows::fs::symlink_dir as symlink; - - let actual = paths::root().join("actual-home"); - t!(std::fs::create_dir(&actual)); - t!(symlink(&actual, paths::home().join(".cargo"))); - cargo_process("install --git") - .arg(p.url().to_string()) - .with_stderr( - "\ -[UPDATING] git repository [..] -[INSTALLING] foo v1.0.0 [..] -[COMPILING] foo v1.0.0 [..] -[FINISHED] [..] -[INSTALLING] [..]home/.cargo/bin/foo[..] -[INSTALLED] package `foo [..] -[WARNING] be sure to add [..] -", - ) - .run(); -} - -#[cargo_test] -fn install_yanked_cargo_package() { - Package::new("baz", "0.0.1").yanked(true).publish(); - cargo_process("install baz --version 0.0.1") - .with_status(101) - .with_stderr_contains( - "\ -[ERROR] cannot install package `baz`, it has been yanked from registry `crates-io` -", - ) - .run(); -} - -#[cargo_test] -fn install_cargo_package_in_a_patched_workspace() { - pkg("foo", "0.1.0"); - pkg("fizz", "1.0.0"); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [workspace] - members = ["baz"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "baz/Cargo.toml", - r#" - [package] - name = "baz" - version = "0.1.0" - authors = [] - - [dependencies] - fizz = "1" - - [patch.crates-io] - fizz = { version = "=1.0.0" } - "#, - ) - .file("baz/src/lib.rs", "") - .build(); - - let stderr = "\ -[WARNING] patch for the non root package will be ignored, specify patch at the workspace root: -package: [..]/foo/baz/Cargo.toml -workspace: [..]/foo/Cargo.toml -"; - p.cargo("check").with_stderr_contains(&stderr).run(); - - // A crate installation must not emit any message from a workspace under - // current working directory. - // See https://github.com/rust-lang/cargo/issues/8619 - p.cargo("install foo") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] foo v0.1.0 (registry [..]) -[INSTALLING] foo v0.1.0 -[COMPILING] foo v0.1.0 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..]foo[EXE] -[INSTALLED] package `foo v0.1.0` (executable `foo[EXE]`) -[WARNING] be sure to add `[..]` to your PATH to be able to run the installed binaries -", - ) - .run(); - assert_has_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn locked_install_without_published_lockfile() { - Package::new("foo", "0.1.0") - .file("src/main.rs", "//! Some docs\nfn main() {}") - .publish(); - - cargo_process("install foo --locked") - .with_stderr_contains("[WARNING] no Cargo.lock file published in foo v0.1.0") - .run(); -} - -#[cargo_test] -fn install_semver_metadata() { - // Check trying to install a package that uses semver metadata. - // This uses alt registry because the bug this is exercising doesn't - // trigger with a replaced source. - registry::alt_init(); - Package::new("foo", "1.0.0+abc") - .alternative(true) - .file("src/main.rs", "fn main() {}") - .publish(); - - cargo_process("install foo --registry alternative --version 1.0.0+abc").run(); - cargo_process("install foo --registry alternative") - .with_stderr("\ -[UPDATING] `alternative` index -[IGNORED] package `foo v1.0.0+abc (registry `alternative`)` is already installed, use --force to override -[WARNING] be sure to add [..] -") - .run(); - // "Updating" is not displayed here due to the --version fast-path. 
- cargo_process("install foo --registry alternative --version 1.0.0+abc") - .with_stderr("\ -[IGNORED] package `foo v1.0.0+abc (registry `alternative`)` is already installed, use --force to override -[WARNING] be sure to add [..] -") - .run(); - cargo_process("install foo --registry alternative --version 1.0.0 --force") - .with_stderr( - "\ -[UPDATING] `alternative` index -[INSTALLING] foo v1.0.0+abc (registry `alternative`) -[COMPILING] foo v1.0.0+abc (registry `alternative`) -[FINISHED] [..] -[REPLACING] [ROOT]/home/.cargo/bin/foo[EXE] -[REPLACED] package [..] -[WARNING] be sure to add [..] -", - ) - .run(); - // Check that from a fresh cache will work without metadata, too. - paths::home().join(".cargo/registry").rm_rf(); - paths::home().join(".cargo/bin").rm_rf(); - cargo_process("install foo --registry alternative --version 1.0.0") - .with_stderr( - "\ -[UPDATING] `alternative` index -[DOWNLOADING] crates ... -[DOWNLOADED] foo v1.0.0+abc (registry `alternative`) -[INSTALLING] foo v1.0.0+abc (registry `alternative`) -[COMPILING] foo v1.0.0+abc (registry `alternative`) -[FINISHED] [..] -[INSTALLING] [ROOT]/home/.cargo/bin/foo[EXE] -[INSTALLED] package `foo v1.0.0+abc (registry `alternative`)` (executable `foo[EXE]`) -[WARNING] be sure to add [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/install_upgrade.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/install_upgrade.rs deleted file mode 100644 index 1a18bc211..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/install_upgrade.rs +++ /dev/null @@ -1,862 +0,0 @@ -//! Tests for `cargo install` where it upgrades a package if it is out-of-date. - -use cargo::core::PackageId; -use std::collections::BTreeSet; -use std::env; -use std::fs; -use std::path::PathBuf; -use std::sync::atomic::{AtomicUsize, Ordering}; - -use cargo_test_support::install::{cargo_home, exe}; -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::{self, Package}; -use cargo_test_support::{ - basic_manifest, cargo_process, cross_compile, execs, git, process, project, Execs, -}; - -fn pkg_maybe_yanked(name: &str, vers: &str, yanked: bool) { - Package::new(name, vers) - .yanked(yanked) - .file( - "src/main.rs", - r#"fn main() { println!("{}", env!("CARGO_PKG_VERSION")) }"#, - ) - .publish(); -} - -// Helper for publishing a package. -fn pkg(name: &str, vers: &str) { - pkg_maybe_yanked(name, vers, false) -} - -fn v1_path() -> PathBuf { - cargo_home().join(".crates.toml") -} - -fn v2_path() -> PathBuf { - cargo_home().join(".crates2.json") -} - -fn load_crates1() -> toml::Value { - toml::from_str(&fs::read_to_string(v1_path()).unwrap()).unwrap() -} - -fn load_crates2() -> serde_json::Value { - serde_json::from_str(&fs::read_to_string(v2_path()).unwrap()).unwrap() -} - -fn installed_exe(name: &str) -> PathBuf { - cargo_home().join("bin").join(exe(name)) -} - -/// Helper for executing binaries installed by cargo. -fn installed_process(name: &str) -> Execs { - static NEXT_ID: AtomicUsize = AtomicUsize::new(0); - thread_local!(static UNIQUE_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst)); - - // This copies the executable to a unique name so that it may be safely - // replaced on Windows. See Project::rename_run for details. - let src = installed_exe(name); - let dst = installed_exe(&UNIQUE_ID.with(|my_id| format!("{}-{}", name, my_id))); - // Note: Cannot use copy. 
On Linux, file descriptors may be left open to
-    // the executable as other tests in other threads are constantly spawning
-    // new processes (see https://github.com/rust-lang/cargo/pull/5557 for
-    // more).
-    fs::rename(&src, &dst)
-        .unwrap_or_else(|e| panic!("Failed to rename `{:?}` to `{:?}`: {}", src, dst, e));
-    // Leave behind a fake file so that reinstall duplicate check works.
-    fs::write(src, "").unwrap();
-    let p = process(dst);
-    execs().with_process_builder(p)
-}
-
-/// Check that the given package name/version has the following bins listed in
-/// the trackers. Also verifies that both trackers are in sync and valid.
-/// Pass in an empty `bins` list to assert that the package is *not* installed.
-fn validate_trackers(name: &str, version: &str, bins: &[&str]) {
-    let v1 = load_crates1();
-    let v1_table = v1.get("v1").unwrap().as_table().unwrap();
-    let v2 = load_crates2();
-    let v2_table = v2["installs"].as_object().unwrap();
-    assert_eq!(v1_table.len(), v2_table.len());
-    // Convert `bins` to a BTreeSet.
-    let bins: BTreeSet<String> = bins
-        .iter()
-        .map(|b| format!("{}{}", b, env::consts::EXE_SUFFIX))
-        .collect();
-    // Check every entry matches between v1 and v2.
-    for (pkg_id_str, v1_bins) in v1_table {
-        let pkg_id: PackageId = toml::Value::from(pkg_id_str.to_string())
-            .try_into()
-            .unwrap();
-        let v1_bins: BTreeSet<String> = v1_bins
-            .as_array()
-            .unwrap()
-            .iter()
-            .map(|b| b.as_str().unwrap().to_string())
-            .collect();
-        if pkg_id.name().as_str() == name && pkg_id.version().to_string() == version {
-            if bins.is_empty() {
-                panic!(
-                    "Expected {} to not be installed, but found: {:?}",
-                    name, v1_bins
-                );
-            } else {
-                assert_eq!(bins, v1_bins);
-            }
-        }
-        let pkg_id_value = serde_json::to_value(&pkg_id).unwrap();
-        let pkg_id_str = pkg_id_value.as_str().unwrap();
-        let v2_info = v2_table
-            .get(pkg_id_str)
-            .expect("v2 missing v1 pkg")
-            .as_object()
-            .unwrap();
-        let v2_bins = v2_info["bins"].as_array().unwrap();
-        let v2_bins: BTreeSet<String> = v2_bins
-            .iter()
-            .map(|b| b.as_str().unwrap().to_string())
-            .collect();
-        assert_eq!(v1_bins, v2_bins);
-    }
-}
-
-#[cargo_test]
-fn registry_upgrade() {
-    // Installing and upgrading from a registry.
-    pkg("foo", "1.0.0");
-    cargo_process("install foo")
-        .with_stderr(
-            "\
-[UPDATING] `[..]` index
-[DOWNLOADING] crates ...
-[DOWNLOADED] foo v1.0.0 (registry [..])
-[INSTALLING] foo v1.0.0
-[COMPILING] foo v1.0.0
-[FINISHED] release [optimized] target(s) in [..]
-[INSTALLING] [CWD]/home/.cargo/bin/foo[EXE]
-[INSTALLED] package `foo v1.0.0` (executable `foo[EXE]`)
-[WARNING] be sure to add [..]
-",
-        )
-        .run();
-    installed_process("foo").with_stdout("1.0.0").run();
-    validate_trackers("foo", "1.0.0", &["foo"]);
-
-    cargo_process("install foo")
-        .with_stderr(
-            "\
-[UPDATING] `[..]` index
-[IGNORED] package `foo v1.0.0` is already installed[..]
-[WARNING] be sure to add [..]
-",
-        )
-        .run();
-
-    pkg("foo", "1.0.1");
-
-    cargo_process("install foo")
-        .with_stderr(
-            "\
-[UPDATING] `[..]` index
-[DOWNLOADING] crates ...
-[DOWNLOADED] foo v1.0.1 (registry [..])
-[INSTALLING] foo v1.0.1
-[COMPILING] foo v1.0.1
-[FINISHED] release [optimized] target(s) in [..]
-[REPLACING] [CWD]/home/.cargo/bin/foo[EXE]
-[REPLACED] package `foo v1.0.0` with `foo v1.0.1` (executable `foo[EXE]`)
-[WARNING] be sure to add [..]
-", - ) - .run(); - - installed_process("foo").with_stdout("1.0.1").run(); - validate_trackers("foo", "1.0.1", &["foo"]); - - cargo_process("install foo --version=1.0.0") - .with_stderr_contains("[COMPILING] foo v1.0.0") - .run(); - installed_process("foo").with_stdout("1.0.0").run(); - validate_trackers("foo", "1.0.0", &["foo"]); - - cargo_process("install foo --version=^1.0") - .with_stderr_contains("[COMPILING] foo v1.0.1") - .run(); - installed_process("foo").with_stdout("1.0.1").run(); - validate_trackers("foo", "1.0.1", &["foo"]); - - cargo_process("install foo --version=^1.0") - .with_stderr_contains("[IGNORED] package `foo v1.0.1` is already installed[..]") - .run(); -} - -#[cargo_test] -fn uninstall() { - // Basic uninstall test. - pkg("foo", "1.0.0"); - cargo_process("install foo").run(); - cargo_process("uninstall foo").run(); - let data = load_crates2(); - assert_eq!(data["installs"].as_object().unwrap().len(), 0); - let v1_table = load_crates1(); - assert_eq!(v1_table.get("v1").unwrap().as_table().unwrap().len(), 0); -} - -#[cargo_test] -fn upgrade_force() { - pkg("foo", "1.0.0"); - cargo_process("install foo").run(); - cargo_process("install foo --force") - .with_stderr( - "\ -[UPDATING] `[..]` index -[INSTALLING] foo v1.0.0 -[COMPILING] foo v1.0.0 -[FINISHED] release [optimized] target(s) in [..] -[REPLACING] [..]/.cargo/bin/foo[EXE] -[REPLACED] package `foo v1.0.0` with `foo v1.0.0` (executable `foo[EXE]`) -[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] -", - ) - .run(); - validate_trackers("foo", "1.0.0", &["foo"]); -} - -#[cargo_test] -fn ambiguous_version_no_longer_allowed() { - // Non-semver-requirement is not allowed for `--version`. - pkg("foo", "1.0.0"); - cargo_process("install foo --version=1.0") - .with_stderr( - "\ -[ERROR] the `--vers` provided, `1.0`, is not a valid semver version: cannot parse '1.0' as a semver - -if you want to specify semver range, add an explicit qualifier, like ^1.0 -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn path_is_always_dirty() { - // --path should always reinstall. - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("install --path .").run(); - p.cargo("install --path .") - .with_stderr_contains("[REPLACING] [..]/foo[EXE]") - .run(); -} - -#[cargo_test] -fn fails_for_conflicts_unknown() { - // If an untracked file is in the way, it should fail. - pkg("foo", "1.0.0"); - let exe = installed_exe("foo"); - exe.parent().unwrap().mkdir_p(); - fs::write(exe, "").unwrap(); - cargo_process("install foo") - .with_stderr_contains("[ERROR] binary `foo[EXE]` already exists in destination") - .with_status(101) - .run(); -} - -#[cargo_test] -fn fails_for_conflicts_known() { - // If the same binary exists in another package, it should fail. 
- pkg("foo", "1.0.0"); - Package::new("bar", "1.0.0") - .file("src/bin/foo.rs", "fn main() {}") - .publish(); - cargo_process("install foo").run(); - cargo_process("install bar") - .with_stderr_contains( - "[ERROR] binary `foo[EXE]` already exists in destination as part of `foo v1.0.0`", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn supports_multiple_binary_names() { - // Can individually install with --bin or --example - Package::new("foo", "1.0.0") - .file("src/main.rs", r#"fn main() { println!("foo"); }"#) - .file("src/bin/a.rs", r#"fn main() { println!("a"); }"#) - .file("examples/ex1.rs", r#"fn main() { println!("ex1"); }"#) - .publish(); - cargo_process("install foo --bin foo").run(); - installed_process("foo").with_stdout("foo").run(); - assert!(!installed_exe("a").exists()); - assert!(!installed_exe("ex1").exists()); - validate_trackers("foo", "1.0.0", &["foo"]); - cargo_process("install foo --bin a").run(); - installed_process("a").with_stdout("a").run(); - assert!(!installed_exe("ex1").exists()); - validate_trackers("foo", "1.0.0", &["a", "foo"]); - cargo_process("install foo --example ex1").run(); - installed_process("ex1").with_stdout("ex1").run(); - validate_trackers("foo", "1.0.0", &["a", "ex1", "foo"]); - cargo_process("uninstall foo --bin foo").run(); - assert!(!installed_exe("foo").exists()); - assert!(installed_exe("ex1").exists()); - validate_trackers("foo", "1.0.0", &["a", "ex1"]); - cargo_process("uninstall foo").run(); - assert!(!installed_exe("ex1").exists()); - assert!(!installed_exe("a").exists()); -} - -#[cargo_test] -fn v1_already_installed_fresh() { - // Install with v1, then try to install again with v2. - pkg("foo", "1.0.0"); - cargo_process("install foo").run(); - cargo_process("install foo") - .with_stderr_contains("[IGNORED] package `foo v1.0.0` is already installed[..]") - .run(); -} - -#[cargo_test] -fn v1_already_installed_dirty() { - // Install with v1, then install a new version with v2. 
- pkg("foo", "1.0.0"); - cargo_process("install foo").run(); - pkg("foo", "1.0.1"); - cargo_process("install foo") - .with_stderr_contains("[COMPILING] foo v1.0.1") - .with_stderr_contains("[REPLACING] [..]/foo[EXE]") - .run(); - validate_trackers("foo", "1.0.1", &["foo"]); -} - -#[cargo_test] -fn change_features_rebuilds() { - Package::new("foo", "1.0.0") - .file( - "src/main.rs", - r#" - fn main() { - if cfg!(feature = "f1") { - println!("f1"); - } - if cfg!(feature = "f2") { - println!("f2"); - } - } - "#, - ) - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [features] - f1 = [] - f2 = [] - default = ["f1"] - "#, - ) - .publish(); - cargo_process("install foo").run(); - installed_process("foo").with_stdout("f1").run(); - cargo_process("install foo --no-default-features").run(); - installed_process("foo").with_stdout("").run(); - cargo_process("install foo --all-features").run(); - installed_process("foo").with_stdout("f1\nf2").run(); - cargo_process("install foo --no-default-features --features=f1").run(); - installed_process("foo").with_stdout("f1").run(); -} - -#[cargo_test] -fn change_profile_rebuilds() { - pkg("foo", "1.0.0"); - cargo_process("install foo").run(); - cargo_process("install foo --debug") - .with_stderr_contains("[COMPILING] foo v1.0.0") - .with_stderr_contains("[REPLACING] [..]foo[EXE]") - .run(); - cargo_process("install foo --debug") - .with_stderr_contains("[IGNORED] package `foo v1.0.0` is already installed[..]") - .run(); -} - -#[cargo_test] -fn change_target_rebuilds() { - if cross_compile::disabled() { - return; - } - pkg("foo", "1.0.0"); - cargo_process("install foo").run(); - let target = cross_compile::alternate(); - cargo_process("install foo -v --target") - .arg(&target) - .with_stderr_contains("[COMPILING] foo v1.0.0") - .with_stderr_contains("[REPLACING] [..]foo[EXE]") - .with_stderr_contains(&format!("[..]--target {}[..]", target)) - .run(); -} - -#[cargo_test] -fn change_bin_sets_rebuilds() { - // Changing which bins in a multi-bin project should reinstall. - Package::new("foo", "1.0.0") - .file("src/main.rs", "fn main() { }") - .file("src/bin/x.rs", "fn main() { }") - .file("src/bin/y.rs", "fn main() { }") - .publish(); - cargo_process("install foo --bin x").run(); - assert!(installed_exe("x").exists()); - assert!(!installed_exe("y").exists()); - assert!(!installed_exe("foo").exists()); - validate_trackers("foo", "1.0.0", &["x"]); - cargo_process("install foo --bin y") - .with_stderr_contains("[INSTALLED] package `foo v1.0.0` (executable `y[EXE]`)") - .run(); - assert!(installed_exe("x").exists()); - assert!(installed_exe("y").exists()); - assert!(!installed_exe("foo").exists()); - validate_trackers("foo", "1.0.0", &["x", "y"]); - cargo_process("install foo") - .with_stderr_contains("[INSTALLED] package `foo v1.0.0` (executable `foo[EXE]`)") - .with_stderr_contains( - "[REPLACED] package `foo v1.0.0` with `foo v1.0.0` (executables `x[EXE]`, `y[EXE]`)", - ) - .run(); - assert!(installed_exe("x").exists()); - assert!(installed_exe("y").exists()); - assert!(installed_exe("foo").exists()); - validate_trackers("foo", "1.0.0", &["foo", "x", "y"]); -} - -#[cargo_test] -fn forwards_compatible() { - // Unknown fields should be preserved. 
- pkg("foo", "1.0.0"); - pkg("bar", "1.0.0"); - cargo_process("install foo").run(); - let key = "foo 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)"; - let v2 = cargo_home().join(".crates2.json"); - let mut data = load_crates2(); - data["newfield"] = serde_json::Value::Bool(true); - data["installs"][key]["moreinfo"] = serde_json::Value::String("shazam".to_string()); - fs::write(&v2, serde_json::to_string(&data).unwrap()).unwrap(); - cargo_process("install bar").run(); - let data: serde_json::Value = serde_json::from_str(&fs::read_to_string(&v2).unwrap()).unwrap(); - assert_eq!(data["newfield"].as_bool().unwrap(), true); - assert_eq!( - data["installs"][key]["moreinfo"].as_str().unwrap(), - "shazam" - ); -} - -#[cargo_test] -fn v2_syncs() { - // V2 inherits the installs from V1. - pkg("one", "1.0.0"); - pkg("two", "1.0.0"); - pkg("three", "1.0.0"); - let p = project() - .file("src/bin/x.rs", "fn main() {}") - .file("src/bin/y.rs", "fn main() {}") - .build(); - cargo_process("install one").run(); - validate_trackers("one", "1.0.0", &["one"]); - p.cargo("install --path .").run(); - validate_trackers("foo", "1.0.0", &["x", "y"]); - // v1 add/remove - cargo_process("install two").run(); - cargo_process("uninstall one").run(); - // This should pick up that `two` was added, `one` was removed. - cargo_process("install three").run(); - validate_trackers("three", "1.0.0", &["three"]); - cargo_process("install --list") - .with_stdout( - "\ -foo v0.0.1 ([..]/foo): - x[EXE] - y[EXE] -three v1.0.0: - three[EXE] -two v1.0.0: - two[EXE] -", - ) - .run(); - cargo_process("install one").run(); - installed_process("one").with_stdout("1.0.0").run(); - validate_trackers("one", "1.0.0", &["one"]); - cargo_process("install two") - .with_stderr_contains("[IGNORED] package `two v1.0.0` is already installed[..]") - .run(); - // v1 remove - p.cargo("uninstall --bin x").run(); - pkg("x", "1.0.0"); - pkg("y", "1.0.0"); - // This should succeed because `x` was removed in V1. - cargo_process("install x").run(); - validate_trackers("x", "1.0.0", &["x"]); - // This should fail because `y` still exists in a different package. - cargo_process("install y") - .with_stderr_contains( - "[ERROR] binary `y[EXE]` already exists in destination \ - as part of `foo v0.0.1 ([..])`", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn upgrade_git() { - let git_project = git::new("foo", |project| project.file("src/main.rs", "fn main() {}")); - // install - cargo_process("install --git") - .arg(git_project.url().to_string()) - .run(); - // Check install stays fresh. - cargo_process("install --git") - .arg(git_project.url().to_string()) - .with_stderr_contains( - "[IGNORED] package `foo v0.0.1 (file://[..]/foo#[..])` is \ - already installed,[..]", - ) - .run(); - // Modify a file. - let repo = git2::Repository::open(git_project.root()).unwrap(); - git_project.change_file("src/main.rs", r#"fn main() {println!("onomatopoeia");}"#); - git::add(&repo); - git::commit(&repo); - // Install should reinstall. - cargo_process("install --git") - .arg(git_project.url().to_string()) - .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])") - .with_stderr_contains("[REPLACING] [..]/foo[EXE]") - .run(); - installed_process("foo").with_stdout("onomatopoeia").run(); - // Check install stays fresh. 
- cargo_process("install --git") - .arg(git_project.url().to_string()) - .with_stderr_contains( - "[IGNORED] package `foo v0.0.1 (file://[..]/foo#[..])` is \ - already installed,[..]", - ) - .run(); -} - -#[cargo_test] -fn switch_sources() { - // Installing what appears to be the same thing, but from different - // sources should reinstall. - registry::alt_init(); - pkg("foo", "1.0.0"); - Package::new("foo", "1.0.0") - .file("src/main.rs", r#"fn main() { println!("alt"); }"#) - .alternative(true) - .publish(); - let p = project() - .at("foo-local") // so it doesn't use the same directory as the git project - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/main.rs", r#"fn main() { println!("local"); }"#) - .build(); - let git_project = git::new("foo", |project| { - project.file("src/main.rs", r#"fn main() { println!("git"); }"#) - }); - - cargo_process("install foo").run(); - installed_process("foo").with_stdout("1.0.0").run(); - cargo_process("install foo --registry alternative").run(); - installed_process("foo").with_stdout("alt").run(); - p.cargo("install --path .").run(); - installed_process("foo").with_stdout("local").run(); - cargo_process("install --git") - .arg(git_project.url().to_string()) - .run(); - installed_process("foo").with_stdout("git").run(); -} - -#[cargo_test] -fn multiple_report() { - // Testing the full output that indicates installed/ignored/replaced/summary. - pkg("one", "1.0.0"); - pkg("two", "1.0.0"); - fn three(vers: &str) { - Package::new("three", vers) - .file("src/main.rs", "fn main() { }") - .file("src/bin/x.rs", "fn main() { }") - .file("src/bin/y.rs", "fn main() { }") - .publish(); - } - three("1.0.0"); - cargo_process("install one two three") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] one v1.0.0 (registry `[..]`) -[DOWNLOADING] crates ... -[DOWNLOADED] two v1.0.0 (registry `[..]`) -[DOWNLOADING] crates ... -[DOWNLOADED] three v1.0.0 (registry `[..]`) -[INSTALLING] one v1.0.0 -[COMPILING] one v1.0.0 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..]/.cargo/bin/one[EXE] -[INSTALLED] package `one v1.0.0` (executable `one[EXE]`) -[INSTALLING] two v1.0.0 -[COMPILING] two v1.0.0 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..]/.cargo/bin/two[EXE] -[INSTALLED] package `two v1.0.0` (executable `two[EXE]`) -[INSTALLING] three v1.0.0 -[COMPILING] three v1.0.0 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..]/.cargo/bin/three[EXE] -[INSTALLING] [..]/.cargo/bin/x[EXE] -[INSTALLING] [..]/.cargo/bin/y[EXE] -[INSTALLED] package `three v1.0.0` (executables `three[EXE]`, `x[EXE]`, `y[EXE]`) -[SUMMARY] Successfully installed one, two, three! -[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] -", - ) - .run(); - pkg("foo", "1.0.1"); - pkg("bar", "1.0.1"); - three("1.0.1"); - cargo_process("install one two three") - .with_stderr( - "\ -[UPDATING] `[..]` index -[IGNORED] package `one v1.0.0` is already installed, use --force to override -[IGNORED] package `two v1.0.0` is already installed, use --force to override -[DOWNLOADING] crates ... -[DOWNLOADED] three v1.0.1 (registry `[..]`) -[INSTALLING] three v1.0.1 -[COMPILING] three v1.0.1 -[FINISHED] release [optimized] target(s) in [..] -[REPLACING] [..]/.cargo/bin/three[EXE] -[REPLACING] [..]/.cargo/bin/x[EXE] -[REPLACING] [..]/.cargo/bin/y[EXE] -[REPLACED] package `three v1.0.0` with `three v1.0.1` (executables `three[EXE]`, `x[EXE]`, `y[EXE]`) -[SUMMARY] Successfully installed one, two, three! 
-[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] -", - ) - .run(); - cargo_process("uninstall three") - .with_stderr( - "\ -[REMOVING] [..]/.cargo/bin/three[EXE] -[REMOVING] [..]/.cargo/bin/x[EXE] -[REMOVING] [..]/.cargo/bin/y[EXE] -", - ) - .run(); - cargo_process("install three --bin x") - .with_stderr( - "\ -[UPDATING] `[..]` index -[INSTALLING] three v1.0.1 -[COMPILING] three v1.0.1 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..]/.cargo/bin/x[EXE] -[INSTALLED] package `three v1.0.1` (executable `x[EXE]`) -[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] -", - ) - .run(); - cargo_process("install three") - .with_stderr( - "\ -[UPDATING] `[..]` index -[INSTALLING] three v1.0.1 -[COMPILING] three v1.0.1 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..]/.cargo/bin/three[EXE] -[INSTALLING] [..]/.cargo/bin/y[EXE] -[REPLACING] [..]/.cargo/bin/x[EXE] -[INSTALLED] package `three v1.0.1` (executables `three[EXE]`, `y[EXE]`) -[REPLACED] package `three v1.0.1` with `three v1.0.1` (executable `x[EXE]`) -[WARNING] be sure to add `[..]/.cargo/bin` to your PATH [..] -", - ) - .run(); -} - -#[cargo_test] -fn no_track() { - pkg("foo", "1.0.0"); - cargo_process("install --no-track foo").run(); - assert!(!v1_path().exists()); - assert!(!v2_path().exists()); - cargo_process("install --no-track foo") - .with_stderr( - "\ -[UPDATING] `[..]` index -[ERROR] binary `foo[EXE]` already exists in destination `[..]/.cargo/bin/foo[EXE]` -Add --force to overwrite -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn deletes_orphaned() { - // When an executable is removed from a project, upgrading should remove it. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("src/bin/other.rs", "fn main() {}") - .file("examples/ex1.rs", "fn main() {}") - .build(); - p.cargo("install --path . --bins --examples").run(); - assert!(installed_exe("other").exists()); - - // Remove a binary, add a new one, and bump the version. - fs::remove_file(p.root().join("src/bin/other.rs")).unwrap(); - p.change_file("examples/ex2.rs", "fn main() {}"); - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.2.0" - "#, - ); - p.cargo("install --path . --bins --examples") - .with_stderr( - "\ -[INSTALLING] foo v0.2.0 [..] -[COMPILING] foo v0.2.0 [..] -[FINISHED] release [..] -[INSTALLING] [..]/.cargo/bin/ex2[EXE] -[REPLACING] [..]/.cargo/bin/ex1[EXE] -[REPLACING] [..]/.cargo/bin/foo[EXE] -[REMOVING] executable `[..]/.cargo/bin/other[EXE]` from previous version foo v0.1.0 [..] -[INSTALLED] package `foo v0.2.0 [..]` (executable `ex2[EXE]`) -[REPLACED] package `foo v0.1.0 [..]` with `foo v0.2.0 [..]` (executables `ex1[EXE]`, `foo[EXE]`) -[WARNING] be sure to add [..] -", - ) - .run(); - assert!(!installed_exe("other").exists()); - validate_trackers("foo", "0.2.0", &["foo", "ex1", "ex2"]); - // 0.1.0 should not have any entries. - validate_trackers("foo", "0.1.0", &[]); -} - -#[cargo_test] -fn already_installed_exact_does_not_update() { - pkg("foo", "1.0.0"); - cargo_process("install foo --version=1.0.0").run(); - cargo_process("install foo --version=1.0.0") - .with_stderr( - "\ -[IGNORED] package `foo v1.0.0` is already installed[..] -[WARNING] be sure to add [..] -", - ) - .run(); - - cargo_process("install foo --version=>=1.0.0") - .with_stderr( - "\ -[UPDATING] `[..]` index -[IGNORED] package `foo v1.0.0` is already installed[..] 
-[WARNING] be sure to add [..] -", - ) - .run(); - pkg("foo", "1.0.1"); - cargo_process("install foo --version=>=1.0.0") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] foo v1.0.1 (registry [..]) -[INSTALLING] foo v1.0.1 -[COMPILING] foo v1.0.1 -[FINISHED] release [optimized] target(s) in [..] -[REPLACING] [CWD]/home/.cargo/bin/foo[EXE] -[REPLACED] package `foo v1.0.0` with `foo v1.0.1` (executable `foo[EXE]`) -[WARNING] be sure to add [..] -", - ) - .run(); -} - -#[cargo_test] -fn already_installed_updates_yank_status_on_upgrade() { - pkg("foo", "1.0.0"); - pkg_maybe_yanked("foo", "1.0.1", true); - cargo_process("install foo --version=1.0.0").run(); - - cargo_process("install foo --version=1.0.1") - .with_status(101) - .with_stderr_contains( - "\ -[ERROR] cannot install package `foo`, it has been yanked from registry `crates-io` -", - ) - .run(); - - pkg_maybe_yanked("foo", "1.0.1", false); - - pkg("foo", "1.0.1"); - cargo_process("install foo --version=1.0.1") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] foo v1.0.1 (registry [..]) -[INSTALLING] foo v1.0.1 -[COMPILING] foo v1.0.1 -[FINISHED] release [optimized] target(s) in [..] -[REPLACING] [CWD]/home/.cargo/bin/foo[EXE] -[REPLACED] package `foo v1.0.0` with `foo v1.0.1` (executable `foo[EXE]`) -[WARNING] be sure to add [..] -", - ) - .run(); -} - -#[cargo_test] -fn partially_already_installed_does_one_update() { - pkg("foo", "1.0.0"); - cargo_process("install foo --version=1.0.0").run(); - pkg("bar", "1.0.0"); - pkg("baz", "1.0.0"); - cargo_process("install foo bar baz --version=1.0.0") - .with_stderr( - "\ -[IGNORED] package `foo v1.0.0` is already installed[..] -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v1.0.0 (registry [..]) -[DOWNLOADING] crates ... -[DOWNLOADED] baz v1.0.0 (registry [..]) -[INSTALLING] bar v1.0.0 -[COMPILING] bar v1.0.0 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [CWD]/home/.cargo/bin/bar[EXE] -[INSTALLED] package `bar v1.0.0` (executable `bar[EXE]`) -[INSTALLING] baz v1.0.0 -[COMPILING] baz v1.0.0 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [CWD]/home/.cargo/bin/baz[EXE] -[INSTALLED] package `baz v1.0.0` (executable `baz[EXE]`) -[SUMMARY] Successfully installed foo, bar, baz! -[WARNING] be sure to add [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/jobserver.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/jobserver.rs deleted file mode 100644 index bcb1fdf95..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/jobserver.rs +++ /dev/null @@ -1,206 +0,0 @@ -//! Tests for the jobserver protocol. 
-
-use std::net::TcpListener;
-use std::process::Command;
-use std::thread;
-
-use cargo_test_support::{cargo_exe, project};
-
-#[cargo_test]
-fn jobserver_exists() {
-    let p = project()
-        .file(
-            "build.rs",
-            r#"
-                use std::env;
-
-                fn main() {
-                    let var = env::var("CARGO_MAKEFLAGS").unwrap();
-                    let arg = var.split(' ')
-                        .find(|p| p.starts_with("--jobserver"))
-                        .unwrap();
-                    let val = &arg[arg.find('=').unwrap() + 1..];
-                    validate(val);
-                }
-
-                #[cfg(unix)]
-                fn validate(s: &str) {
-                    use std::fs::File;
-                    use std::io::*;
-                    use std::os::unix::prelude::*;
-
-                    let fds = s.split(',').collect::<Vec<_>>();
-                    println!("{}", s);
-                    assert_eq!(fds.len(), 2);
-                    unsafe {
-                        let mut read = File::from_raw_fd(fds[0].parse().unwrap());
-                        let mut write = File::from_raw_fd(fds[1].parse().unwrap());
-
-                        let mut buf = [0];
-                        assert_eq!(read.read(&mut buf).unwrap(), 1);
-                        assert_eq!(write.write(&buf).unwrap(), 1);
-                    }
-                }
-
-                #[cfg(windows)]
-                fn validate(_: &str) {
-                    // a little too complicated for a test...
-                }
-            "#,
-        )
-        .file("src/lib.rs", "")
-        .build();
-
-    // Explicitly use `-j2` to ensure that there's eventually going to be a
-    // token to read from `validate` above, since running the build script
-    // itself consumes a token.
-    p.cargo("build -j2").run();
-}
-
-#[cargo_test]
-fn makes_jobserver_used() {
-    let make = if cfg!(windows) {
-        "mingw32-make"
-    } else {
-        "make"
-    };
-    if Command::new(make).arg("--version").output().is_err() {
-        return;
-    }
-
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-                [package]
-                name = "foo"
-                version = "0.0.1"
-                authors = []
-
-                [dependencies]
-                d1 = { path = "d1" }
-                d2 = { path = "d2" }
-                d3 = { path = "d3" }
-            "#,
-        )
-        .file("src/lib.rs", "")
-        .file(
-            "d1/Cargo.toml",
-            r#"
-                [package]
-                name = "d1"
-                version = "0.0.1"
-                authors = []
-                build = "../dbuild.rs"
-            "#,
-        )
-        .file("d1/src/lib.rs", "")
-        .file(
-            "d2/Cargo.toml",
-            r#"
-                [package]
-                name = "d2"
-                version = "0.0.1"
-                authors = []
-                build = "../dbuild.rs"
-            "#,
-        )
-        .file("d2/src/lib.rs", "")
-        .file(
-            "d3/Cargo.toml",
-            r#"
-                [package]
-                name = "d3"
-                version = "0.0.1"
-                authors = []
-                build = "../dbuild.rs"
-            "#,
-        )
-        .file("d3/src/lib.rs", "")
-        .file(
-            "dbuild.rs",
-            r#"
-                use std::net::TcpStream;
-                use std::env;
-                use std::io::Read;
-
-                fn main() {
-                    let addr = env::var("ADDR").unwrap();
-                    let mut stream = TcpStream::connect(addr).unwrap();
-                    let mut v = Vec::new();
-                    stream.read_to_end(&mut v).unwrap();
-                }
-            "#,
-        )
-        .file(
-            "Makefile",
-            "\
-all:
-\t+$(CARGO) build
-",
-        )
-        .build();
-
-    let l = TcpListener::bind("127.0.0.1:0").unwrap();
-    let addr = l.local_addr().unwrap();
-
-    let child = thread::spawn(move || {
-        let a1 = l.accept().unwrap();
-        let a2 = l.accept().unwrap();
-        l.set_nonblocking(true).unwrap();
-
-        for _ in 0..1000 {
-            assert!(l.accept().is_err());
-            thread::yield_now();
-        }
-
-        drop(a1);
-        l.set_nonblocking(false).unwrap();
-        let a3 = l.accept().unwrap();
-
-        drop((a2, a3));
-    });
-
-    p.process(make)
-        .env("CARGO", cargo_exe())
-        .env("ADDR", addr.to_string())
-        .arg("-j2")
-        .run();
-    child.join().unwrap();
-}
-
-#[cargo_test]
-fn jobserver_and_j() {
-    let make = if cfg!(windows) {
-        "mingw32-make"
-    } else {
-        "make"
-    };
-    if Command::new(make).arg("--version").output().is_err() {
-        return;
-    }
-
-    let p = project()
-        .file("src/lib.rs", "")
-        .file(
-            "Makefile",
-            "\
-all:
-\t+$(CARGO) build -j2
-",
-        )
-        .build();
-
-    p.process(make)
-        .env("CARGO", cargo_exe())
-        .arg("-j2")
-        .with_stderr(
-            "\
-warning: a `-j` argument was passed to Cargo but Cargo is also 
configured \ -with an external jobserver in its environment, ignoring the `-j` parameter -[COMPILING] [..] -[FINISHED] [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/list_availables.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/list_availables.rs deleted file mode 100644 index 6bbbeb160..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/list_availables.rs +++ /dev/null @@ -1,232 +0,0 @@ -//! Tests for packages/target filter flags giving suggestions on which -//! packages/targets are available. - -use cargo_test_support::project; - -const EXAMPLE: u8 = 1 << 0; -const BIN: u8 = 1 << 1; -const TEST: u8 = 1 << 2; -const BENCH: u8 = 1 << 3; -const PACKAGE: u8 = 1 << 4; - -fn list_availables_test(command: &str, targets: u8) { - let full_project = project() - .file("examples/a.rs", "fn main() { }") - .file("examples/b.rs", "fn main() { }") - .file("benches/bench1.rs", "") - .file("benches/bench2.rs", "") - .file("tests/test1.rs", "") - .file("tests/test2.rs", "") - .file("src/main.rs", "fn main() { }") - .file("Cargo.lock", "") // for `cargo pkgid` - .build(); - - if targets & EXAMPLE != 0 { - full_project - .cargo(&format!("{} --example", command)) - .with_stderr( - "\ -error: \"--example\" takes one argument. -Available examples: - a - b - -", - ) - .with_status(101) - .run(); - } - - if targets & BIN != 0 { - full_project - .cargo(&format!("{} --bin", command)) - .with_stderr( - "\ -error: \"--bin\" takes one argument. -Available binaries: - foo - -", - ) - .with_status(101) - .run(); - } - - if targets & BENCH != 0 { - full_project - .cargo(&format!("{} --bench", command)) - .with_stderr( - "\ -error: \"--bench\" takes one argument. -Available benches: - bench1 - bench2 - -", - ) - .with_status(101) - .run(); - } - - if targets & TEST != 0 { - full_project - .cargo(&format!("{} --test", command)) - .with_stderr( - "\ -error: \"--test\" takes one argument. -Available tests: - test1 - test2 - -", - ) - .with_status(101) - .run(); - } - - if targets & PACKAGE != 0 { - full_project - .cargo(&format!("{} -p", command)) - .with_stderr( - "\ -[ERROR] \"--package \" requires a SPEC format value, \ -which can be any package ID specifier in the dependency graph. -Run `cargo help pkgid` for more information about SPEC format. - -Possible packages/workspace members: - foo - -", - ) - .with_status(101) - .run(); - } - - let empty_project = project().file("src/lib.rs", "").build(); - - if targets & EXAMPLE != 0 { - empty_project - .cargo(&format!("{} --example", command)) - .with_stderr( - "\ -error: \"--example\" takes one argument. -No examples available. - -", - ) - .with_status(101) - .run(); - } - - if targets & BIN != 0 { - empty_project - .cargo(&format!("{} --bin", command)) - .with_stderr( - "\ -error: \"--bin\" takes one argument. -No binaries available. - -", - ) - .with_status(101) - .run(); - } - - if targets & BENCH != 0 { - empty_project - .cargo(&format!("{} --bench", command)) - .with_stderr( - "\ -error: \"--bench\" takes one argument. -No benches available. - -", - ) - .with_status(101) - .run(); - } - - if targets & TEST != 0 { - empty_project - .cargo(&format!("{} --test", command)) - .with_stderr( - "\ -error: \"--test\" takes one argument. -No tests available. 
- -", - ) - .with_status(101) - .run(); - } -} - -#[cargo_test] -fn build_list_availables() { - list_availables_test("build", EXAMPLE | BIN | TEST | BENCH | PACKAGE); -} - -#[cargo_test] -fn check_list_availables() { - list_availables_test("check", EXAMPLE | BIN | TEST | BENCH | PACKAGE); -} - -#[cargo_test] -fn doc_list_availables() { - list_availables_test("doc", BIN | PACKAGE); -} - -#[cargo_test] -fn fix_list_availables() { - list_availables_test("fix", EXAMPLE | BIN | TEST | BENCH | PACKAGE); -} - -#[cargo_test] -fn run_list_availables() { - list_availables_test("run", EXAMPLE | BIN | PACKAGE); -} - -#[cargo_test] -fn test_list_availables() { - list_availables_test("test", EXAMPLE | BIN | TEST | BENCH | PACKAGE); -} - -#[cargo_test] -fn bench_list_availables() { - list_availables_test("bench", EXAMPLE | BIN | TEST | BENCH | PACKAGE); -} - -#[cargo_test] -fn install_list_availables() { - list_availables_test("install", EXAMPLE | BIN); -} - -#[cargo_test] -fn rustdoc_list_availables() { - list_availables_test("rustdoc", EXAMPLE | BIN | TEST | BENCH | PACKAGE); -} - -#[cargo_test] -fn rustc_list_availables() { - list_availables_test("rustc", EXAMPLE | BIN | TEST | BENCH | PACKAGE); -} - -#[cargo_test] -fn pkgid_list_availables() { - list_availables_test("pkgid", PACKAGE); -} - -#[cargo_test] -fn tree_list_availables() { - list_availables_test("tree", PACKAGE); -} - -#[cargo_test] -fn clean_list_availables() { - list_availables_test("clean", PACKAGE); -} - -#[cargo_test] -fn update_list_availables() { - list_availables_test("update", PACKAGE); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/local_registry.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/local_registry.rs deleted file mode 100644 index 1a9cf1f8c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/local_registry.rs +++ /dev/null @@ -1,490 +0,0 @@ -//! Tests for local-registry sources. - -use cargo_test_support::paths::{self, CargoPathExt}; -use cargo_test_support::registry::{registry_path, Package}; -use cargo_test_support::{basic_manifest, project, t}; -use std::fs; - -fn setup() { - let root = paths::root(); - t!(fs::create_dir(&root.join(".cargo"))); - t!(fs::write( - root.join(".cargo/config"), - r#" - [source.crates-io] - registry = 'https://wut' - replace-with = 'my-awesome-local-registry' - - [source.my-awesome-local-registry] - local-registry = 'registry' - "# - )); -} - -#[cargo_test] -fn simple() { - setup(); - Package::new("bar", "0.0.1") - .local(true) - .file("src/lib.rs", "pub fn bar() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.0.1" - "#, - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UNPACKING] bar v0.0.1 ([..]) -[COMPILING] bar v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] [..] -", - ) - .run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); - p.cargo("test").run(); -} - -#[cargo_test] -fn depend_on_yanked() { - setup(); - Package::new("bar", "0.0.1").local(true).publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Run cargo to create lock file. 
- p.cargo("check").run(); - - registry_path().join("index").join("3").rm_rf(); - Package::new("bar", "0.0.1") - .local(true) - .yanked(true) - .publish(); - - p.cargo("check") - .with_stderr( - "\ -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn multiple_versions() { - setup(); - Package::new("bar", "0.0.1").local(true).publish(); - Package::new("bar", "0.1.0") - .local(true) - .file("src/lib.rs", "pub fn bar() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UNPACKING] bar v0.1.0 ([..]) -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] [..] -", - ) - .run(); - - Package::new("bar", "0.2.0") - .local(true) - .file("src/lib.rs", "pub fn bar() {}") - .publish(); - - p.cargo("update -v") - .with_stderr("[UPDATING] bar v0.1.0 -> v0.2.0") - .run(); -} - -#[cargo_test] -fn multiple_names() { - setup(); - Package::new("bar", "0.0.1") - .local(true) - .file("src/lib.rs", "pub fn bar() {}") - .publish(); - Package::new("baz", "0.1.0") - .local(true) - .file("src/lib.rs", "pub fn baz() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - baz = "*" - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate bar; - extern crate baz; - pub fn foo() { - bar::bar(); - baz::baz(); - } - "#, - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UNPACKING] [..] -[UNPACKING] [..] -[COMPILING] [..] -[COMPILING] [..] -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn interdependent() { - setup(); - Package::new("bar", "0.0.1") - .local(true) - .file("src/lib.rs", "pub fn bar() {}") - .publish(); - Package::new("baz", "0.1.0") - .local(true) - .dep("bar", "*") - .file("src/lib.rs", "extern crate bar; pub fn baz() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - baz = "*" - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate bar; - extern crate baz; - pub fn foo() { - bar::bar(); - baz::baz(); - } - "#, - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UNPACKING] [..] -[UNPACKING] [..] -[COMPILING] bar v0.0.1 -[COMPILING] baz v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn path_dep_rewritten() { - setup(); - Package::new("bar", "0.0.1") - .local(true) - .file("src/lib.rs", "pub fn bar() {}") - .publish(); - Package::new("baz", "0.1.0") - .local(true) - .dep("bar", "*") - .file( - "Cargo.toml", - r#" - [project] - name = "baz" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "bar", version = "*" } - "#, - ) - .file("src/lib.rs", "extern crate bar; pub fn baz() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - baz = "*" - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate bar; - extern crate baz; - pub fn foo() { - bar::bar(); - baz::baz(); - } - "#, - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UNPACKING] [..] -[UNPACKING] [..] 
-[COMPILING] bar v0.0.1 -[COMPILING] baz v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn invalid_dir_bad() { - setup(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [source.crates-io] - registry = 'https://wut' - replace-with = 'my-awesome-local-directory' - - [source.my-awesome-local-directory] - local-registry = '/path/to/nowhere' - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 [..]` - -Caused by: - failed to load source for dependency `bar` - -Caused by: - Unable to update registry `crates-io` - -Caused by: - failed to update replaced source registry `crates-io` - -Caused by: - local registry path is not a directory: [..]path[..]to[..]nowhere -", - ) - .run(); -} - -#[cargo_test] -fn different_directory_replacing_the_registry_is_bad() { - setup(); - - // Move our test's .cargo/config to a temporary location and publish a - // registry package we're going to use first. - let config = paths::root().join(".cargo"); - let config_tmp = paths::root().join(".cargo-old"); - t!(fs::rename(&config, &config_tmp)); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Generate a lock file against the crates.io registry - Package::new("bar", "0.0.1").publish(); - p.cargo("build").run(); - - // Switch back to our directory source, and now that we're replacing - // crates.io make sure that this fails because we're replacing with a - // different checksum - config.rm_rf(); - t!(fs::rename(&config_tmp, &config)); - Package::new("bar", "0.0.1") - .file("src/lib.rs", "invalid") - .local(true) - .publish(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] checksum for `bar v0.0.1` changed between lock files - -this could be indicative of a few possible errors: - - * the lock file is corrupt - * a replacement source in use (e.g., a mirror) returned a different checksum - * the source itself may be corrupt in one way or another - -unable to verify that `bar v0.0.1` is the same as when the lockfile was generated - -", - ) - .run(); -} - -#[cargo_test] -fn crates_io_registry_url_is_optional() { - let root = paths::root(); - t!(fs::create_dir(&root.join(".cargo"))); - t!(fs::write( - root.join(".cargo/config"), - r#" - [source.crates-io] - replace-with = 'my-awesome-local-registry' - - [source.my-awesome-local-registry] - local-registry = 'registry' - "# - )); - - Package::new("bar", "0.0.1") - .local(true) - .file("src/lib.rs", "pub fn bar() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.0.1" - "#, - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UNPACKING] bar v0.0.1 ([..]) -[COMPILING] bar v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] [..] 
-", - ) - .run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); - p.cargo("test").run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/locate_project.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/locate_project.rs deleted file mode 100644 index 7e8ceb4c6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/locate_project.rs +++ /dev/null @@ -1,76 +0,0 @@ -//! Tests for the `cargo locate-project` command. - -use cargo_test_support::project; - -#[cargo_test] -fn simple() { - let p = project().build(); - - p.cargo("locate-project") - .with_json(r#"{"root": "[ROOT]/foo/Cargo.toml"}"#) - .run(); -} - -#[cargo_test] -fn message_format() { - let p = project().build(); - - p.cargo("locate-project --message-format plain") - .with_stdout("[ROOT]/foo/Cargo.toml") - .run(); - - p.cargo("locate-project --message-format json") - .with_json(r#"{"root": "[ROOT]/foo/Cargo.toml"}"#) - .run(); - - p.cargo("locate-project --message-format cryptic") - .with_stderr("error: invalid message format specifier: `cryptic`") - .with_status(101) - .run(); -} - -#[cargo_test] -fn workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "outer" - version = "0.0.0" - - [workspace] - members = ["inner"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "inner/Cargo.toml", - r#" - [package] - name = "inner" - version = "0.0.0" - "#, - ) - .file("inner/src/lib.rs", "") - .build(); - - let outer_manifest = r#"{"root": "[ROOT]/foo/Cargo.toml"}"#; - let inner_manifest = r#"{"root": "[ROOT]/foo/inner/Cargo.toml"}"#; - - p.cargo("locate-project").with_json(outer_manifest).run(); - - p.cargo("locate-project") - .cwd("inner") - .with_json(inner_manifest) - .run(); - - p.cargo("locate-project --workspace") - .with_json(outer_manifest) - .run(); - - p.cargo("locate-project --workspace") - .cwd("inner") - .with_json(outer_manifest) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/lockfile_compat.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/lockfile_compat.rs deleted file mode 100644 index 0be1d4aa6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/lockfile_compat.rs +++ /dev/null @@ -1,838 +0,0 @@ -//! Tests for supporting older versions of the Cargo.lock file format. - -use cargo_test_support::compare::assert_match_exact; -use cargo_test_support::git; -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_lib_manifest, basic_manifest, project}; - -#[cargo_test] -fn oldest_lockfile_still_works() { - let cargo_commands = vec!["build", "update"]; - for cargo_command in cargo_commands { - oldest_lockfile_still_works_with_command(cargo_command); - } -} - -fn oldest_lockfile_still_works_with_command(cargo_command: &str) { - Package::new("bar", "0.1.0").publish(); - - let expected_lockfile = r#"# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "[..]" - -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar", -] -"#; - - let old_lockfile = r#" -[root] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -"#; - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file("Cargo.lock", old_lockfile) - .build(); - - p.cargo(cargo_command).run(); - - let lock = p.read_lockfile(); - assert_match_exact(expected_lockfile, &lock); -} - -#[cargo_test] -fn frozen_flag_preserves_old_lockfile() { - let cksum = Package::new("bar", "0.1.0").publish(); - - let old_lockfile = format!( - r#"[root] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}" -"#, - cksum, - ); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file("Cargo.lock", &old_lockfile) - .build(); - - p.cargo("build --locked").run(); - - let lock = p.read_lockfile(); - assert_match_exact(&old_lockfile, &lock); -} - -#[cargo_test] -fn totally_wild_checksums_works() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file( - "Cargo.lock", - r#" -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" -"checksum bar 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" -"#, - ); - - let p = p.build(); - - p.cargo("build").run(); - - let lock = p.read_lockfile(); - assert_match_exact( - r#"# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "[..]" - -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar", -] -"#, - &lock, - ); -} - -#[cargo_test] -fn wrong_checksum_is_an_error() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file( - "Cargo.lock", - r#" -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" -"#, - ); - - let p = p.build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] `[..]` index -error: checksum for `bar v0.1.0` changed between lock files - -this could be indicative of a few possible errors: - - * the lock file is corrupt - * a replacement source in use (e.g., a mirror) returned a different checksum - * the source itself may be corrupt in one way or another - -unable to verify that `bar v0.1.0` is the same as when the lockfile was generated - -", - ) - .run(); -} - -// If the checksum is unlisted in the lock file (e.g., ) yet we can -// calculate it (e.g., it's a registry dep), then we should in theory just fill -// it in. -#[cargo_test] -fn unlisted_checksum_is_bad_if_we_calculate() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file( - "Cargo.lock", - r#" -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "" -"#, - ); - let p = p.build(); - - p.cargo("fetch") - .with_status(101) - .with_stderr( - "\ -[UPDATING] `[..]` index -error: checksum for `bar v0.1.0` was not previously calculated, but a checksum \ -could now be calculated - -this could be indicative of a few possible situations: - - * the source `[..]` did not previously support checksums, - but was replaced with one that does - * newer Cargo implementations know how to checksum this source, but this - older implementation does not - * the lock file is corrupt - -", - ) - .run(); -} - -// If the checksum is listed in the lock file yet we cannot calculate it (e.g., -// Git dependencies as of today), then make sure we choke. 
-#[cargo_test] -fn listed_checksum_bad_if_we_cannot_compute() { - let git = git::new("bar", |p| { - p.file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = {{ git = '{}' }} - "#, - git.url() - ), - ) - .file("src/lib.rs", "") - .file( - "Cargo.lock", - &format!( - r#" -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar 0.1.0 (git+{0})" -] - -[[package]] -name = "bar" -version = "0.1.0" -source = "git+{0}" - -[metadata] -"checksum bar 0.1.0 (git+{0})" = "checksum" -"#, - git.url() - ), - ); - - let p = p.build(); - - p.cargo("fetch") - .with_status(101) - .with_stderr( - "\ -[UPDATING] git repository `[..]` -error: checksum for `bar v0.1.0 ([..])` could not be calculated, but a \ -checksum is listed in the existing lock file[..] - -this could be indicative of a few possible situations: - - * the source `[..]` supports checksums, - but was replaced with one that doesn't - * the lock file is corrupt - -unable to verify that `bar v0.1.0 ([..])` is the same as when the lockfile was generated - -", - ) - .run(); -} - -#[cargo_test] -fn current_lockfile_format() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", ""); - let p = p.build(); - - p.cargo("build").run(); - - let actual = p.read_lockfile(); - - let expected = "\ -# This file is automatically @generated by Cargo.\n# It is not intended for manual editing. -version = 3 - -[[package]] -name = \"bar\" -version = \"0.1.0\" -source = \"registry+https://github.com/rust-lang/crates.io-index\" -checksum = \"[..]\" - -[[package]] -name = \"foo\" -version = \"0.0.1\" -dependencies = [ - \"bar\", -] -"; - assert_match_exact(expected, &actual); -} - -#[cargo_test] -fn lockfile_without_root() { - Package::new("bar", "0.1.0").publish(); - - let lockfile = r#" -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar", -] -"#; - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file("Cargo.lock", lockfile); - - let p = p.build(); - - p.cargo("build").run(); - - let lock = p.read_lockfile(); - assert_match_exact( - r#"# [..] -# [..] 
-version = 3 - -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "[..]" - -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar", -] -"#, - &lock, - ); -} - -#[cargo_test] -fn locked_correct_error() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", ""); - let p = p.build(); - - p.cargo("build --locked") - .with_status(101) - .with_stderr( - "\ -[UPDATING] `[..]` index -error: the lock file [CWD]/Cargo.lock needs to be updated but --locked was passed to prevent this -If you want to try to generate the lock file without accessing the network, \ -remove the --locked flag and use --offline instead. -", - ) - .run(); -} - -#[cargo_test] -fn v2_format_preserved() { - let cksum = Package::new("bar", "0.1.0").publish(); - - let lockfile = format!( - r#"# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "{}" - -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar", -] -"#, - cksum - ); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file("Cargo.lock", &lockfile) - .build(); - - p.cargo("fetch").run(); - - let lock = p.read_lockfile(); - assert_match_exact(&lockfile, &lock); -} - -#[cargo_test] -fn v2_path_and_crates_io() { - let cksum010 = Package::new("a", "0.1.0").publish(); - let cksum020 = Package::new("a", "0.2.0").publish(); - - let lockfile = format!( - r#"# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -[[package]] -name = "a" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "{}" - -[[package]] -name = "a" -version = "0.2.0" - -[[package]] -name = "a" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "{}" - -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "a 0.1.0", - "a 0.2.0", - "a 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", -] -"#, - cksum010, cksum020, - ); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = 'a' } - b = { version = "0.1", package = 'a' } - c = { version = "0.2", package = 'a' } - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.2.0" - "#, - ) - .file("a/src/lib.rs", "") - .file("Cargo.lock", &lockfile) - .build(); - - p.cargo("fetch").run(); - p.cargo("fetch").run(); - - let lock = p.read_lockfile(); - assert_match_exact(&lockfile, &lock); -} - -#[cargo_test] -fn v3_and_git() { - let (git_project, repo) = git::new_repo("dep1", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("dep1")) - .file("src/lib.rs", "") - }); - let head_id = repo.head().unwrap().target().unwrap(); - - let lockfile = format!( - r#"# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "dep1" -version = "0.5.0" -source = "git+{}?branch=master#{}" - -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "dep1", -] -"#, - git_project.url(), - head_id, - ); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - dep1 = {{ git = '{}', branch = 'master' }} - "#, - git_project.url(), - ), - ) - .file("src/lib.rs", "") - .file("Cargo.lock", "version = 3") - .build(); - - p.cargo("fetch").run(); - - let lock = p.read_lockfile(); - assert_match_exact(&lockfile, &lock); -} - -#[cargo_test] -fn lock_from_the_future() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - "#, - ) - .file("src/lib.rs", "") - .file("Cargo.lock", "version = 10000000") - .build(); - - p.cargo("fetch") - .with_stderr( - "\ -error: failed to parse lock file at: [..] - -Caused by: - lock file version `10000000` was found, but this version of Cargo does not \ - understand this lock file, perhaps Cargo needs to be updated? -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn preserve_old_format_if_no_update_needed() { - let cksum = Package::new("bar", "0.1.0").publish(); - let lockfile = format!( - r#"# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "foo" -version = "0.0.1" -dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[metadata] -"checksum bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}" -"#, - cksum - ); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file("Cargo.lock", &lockfile) - .build(); - - p.cargo("build --locked").run(); -} - -#[cargo_test] -fn same_name_version_different_sources() { - let cksum = Package::new("foo", "0.1.0").publish(); - let (git_project, repo) = git::new_repo("dep1", |project| { - project - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - }); - let head_id = repo.head().unwrap().target().unwrap(); - - // Lockfile was generated with Rust 1.51 - let lockfile = format!( - r#"# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "foo" -version = "0.1.0" -dependencies = [ - "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "foo 0.1.0 (git+{url})", -] - -[[package]] -name = "foo" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "{cksum}" - -[[package]] -name = "foo" -version = "0.1.0" -source = "git+{url}#{sha}" -"#, - sha = head_id, - url = git_project.url(), - cksum = cksum - ); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - foo = "0.1.0" - foo2 = {{ git = '{}', package = 'foo' }} - "#, - git_project.url(), - ), - ) - .file("src/lib.rs", "") - .file("Cargo.lock", &lockfile) - .build(); - - p.cargo("build").run(); - - assert_eq!(p.read_file("Cargo.lock"), lockfile); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/login.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/login.rs deleted file mode 100644 index b69616ff8..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/login.rs +++ /dev/null @@ -1,108 +0,0 @@ -//! Tests for the `cargo login` command. - -use cargo_test_support::install::cargo_home; -use cargo_test_support::registry; -use cargo_test_support::{cargo_process, paths, t}; -use std::fs::{self, OpenOptions}; -use std::io::prelude::*; -use std::path::PathBuf; - -const TOKEN: &str = "test-token"; -const TOKEN2: &str = "test-token2"; -const ORIGINAL_TOKEN: &str = "api-token"; - -fn setup_new_credentials() { - let config = cargo_home().join("credentials"); - setup_new_credentials_at(config); -} - -fn setup_new_credentials_at(config: PathBuf) { - t!(fs::create_dir_all(config.parent().unwrap())); - t!(fs::write( - &config, - format!(r#"token = "{token}""#, token = ORIGINAL_TOKEN) - )); -} - -fn check_token(expected_token: &str, registry: Option<&str>) -> bool { - let credentials = cargo_home().join("credentials"); - assert!(credentials.is_file()); - - let contents = fs::read_to_string(&credentials).unwrap(); - let toml: toml::Value = contents.parse().unwrap(); - - let token = match (registry, toml) { - // A registry has been provided, so check that the token exists in a - // table for the registry. - (Some(registry), toml::Value::Table(table)) => table - .get("registries") - .and_then(|registries_table| registries_table.get(registry)) - .and_then(|registry_table| match registry_table.get("token") { - Some(&toml::Value::String(ref token)) => Some(token.as_str().to_string()), - _ => None, - }), - // There is no registry provided, so check the global token instead. 
- (None, toml::Value::Table(table)) => table - .get("registry") - .and_then(|registry_table| registry_table.get("token")) - .and_then(|v| match v { - toml::Value::String(ref token) => Some(token.as_str().to_string()), - _ => None, - }), - _ => None, - }; - - if let Some(token_val) = token { - token_val == expected_token - } else { - false - } -} - -#[cargo_test] -fn registry_credentials() { - registry::alt_init(); - - let config = paths::home().join(".cargo/config"); - let mut f = OpenOptions::new().append(true).open(config).unwrap(); - t!(f.write_all( - format!( - r#" - [registries.alternative2] - index = '{}' - "#, - registry::generate_url("alternative2-registry") - ) - .as_bytes(), - )); - - registry::init_registry( - registry::generate_path("alternative2-registry"), - registry::generate_alt_dl_url("alt2_dl"), - registry::generate_url("alt2_api"), - registry::generate_path("alt2_api"), - ); - setup_new_credentials(); - - let reg = "alternative"; - - cargo_process("login --registry").arg(reg).arg(TOKEN).run(); - - // Ensure that we have not updated the default token - assert!(check_token(ORIGINAL_TOKEN, None)); - - // Also ensure that we get the new token for the registry - assert!(check_token(TOKEN, Some(reg))); - - let reg2 = "alternative2"; - cargo_process("login --registry") - .arg(reg2) - .arg(TOKEN2) - .run(); - - // Ensure not overwriting 1st alternate registry token with - // 2nd alternate registry token (see rust-lang/cargo#7701). - assert!(check_token(ORIGINAL_TOKEN, None)); - assert!(check_token(TOKEN, Some(reg))); - assert!(check_token(TOKEN2, Some(reg2))); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/logout.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/logout.rs deleted file mode 100644 index 606a06c84..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/logout.rs +++ /dev/null @@ -1,83 +0,0 @@ -//! Tests for the `cargo logout` command. - -use cargo_test_support::install::cargo_home; -use cargo_test_support::{cargo_process, registry}; -use std::fs; - -#[cargo_test] -fn gated() { - registry::init(); - cargo_process("logout") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] the `cargo logout` command is unstable, pass `-Z unstable-options` to enable it -See https://github.com/rust-lang/cargo/issues/8933 for more information about \ -the `cargo logout` command. -", - ) - .run(); -} - -/// Checks whether or not the token is set for the given token. -fn check_config_token(registry: Option<&str>, should_be_set: bool) { - let credentials = cargo_home().join("credentials"); - let contents = fs::read_to_string(&credentials).unwrap(); - let toml: toml::Value = contents.parse().unwrap(); - if let Some(registry) = registry { - assert_eq!( - toml.get("registries") - .and_then(|registries| registries.get(registry)) - .and_then(|registry| registry.get("token")) - .is_some(), - should_be_set - ); - } else { - assert_eq!( - toml.get("registry") - .and_then(|registry| registry.get("token")) - .is_some(), - should_be_set - ); - } -} - -fn simple_logout_test(reg: Option<&str>, flag: &str) { - registry::init(); - let msg = reg.unwrap_or("crates.io"); - check_config_token(reg, true); - cargo_process(&format!("logout -Z unstable-options {}", flag)) - .masquerade_as_nightly_cargo() - .with_stderr(&format!( - "\ -[UPDATING] [..] 
-[LOGOUT] token for `{}` has been removed from local storage -", - msg - )) - .run(); - check_config_token(reg, false); - - cargo_process(&format!("logout -Z unstable-options {}", flag)) - .masquerade_as_nightly_cargo() - .with_stderr(&format!( - "\ -[LOGOUT] not currently logged in to `{}` -", - msg - )) - .run(); - check_config_token(reg, false); -} - -#[cargo_test] -fn default_registry() { - simple_logout_test(None, ""); -} - -#[cargo_test] -fn other_registry() { - registry::alt_init(); - simple_logout_test(Some("alternative"), "--registry alternative"); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/lto.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/lto.rs deleted file mode 100644 index 82494d4e7..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/lto.rs +++ /dev/null @@ -1,842 +0,0 @@ -use cargo::core::compiler::Lto; -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_manifest, project, Project}; -use std::process::Output; - -#[cargo_test] -fn with_deps() { - Package::new("bar", "0.0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.0.0" - - [dependencies] - bar = "*" - - [profile.release] - lto = true - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() {}") - .build(); - p.cargo("build -v --release") - .with_stderr_contains("[..]`rustc[..]--crate-name bar[..]-C linker-plugin-lto[..]`") - .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`") - .run(); -} - -#[cargo_test] -fn shared_deps() { - Package::new("bar", "0.0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.0.0" - - [dependencies] - bar = "*" - - [build-dependencies] - bar = "*" - - [profile.release] - lto = true - "#, - ) - .file("build.rs", "extern crate bar; fn main() {}") - .file("src/main.rs", "extern crate bar; fn main() {}") - .build(); - p.cargo("build -v --release") - .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`") - .run(); -} - -#[cargo_test] -fn build_dep_not_ltod() { - Package::new("bar", "0.0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.0.0" - - [build-dependencies] - bar = "*" - - [profile.release] - lto = true - "#, - ) - .file("build.rs", "extern crate bar; fn main() {}") - .file("src/main.rs", "fn main() {}") - .build(); - p.cargo("build -v --release") - .with_stderr_contains("[..]`rustc[..]--crate-name bar[..]-C embed-bitcode=no[..]`") - .with_stderr_contains("[..]`rustc[..]--crate-name test[..]-C lto[..]`") - .run(); -} - -#[cargo_test] -fn complicated() { - Package::new("dep-shared", "0.0.1") - .file("src/lib.rs", "pub fn foo() {}") - .publish(); - Package::new("dep-normal2", "0.0.1") - .file("src/lib.rs", "pub fn foo() {}") - .publish(); - Package::new("dep-normal", "0.0.1") - .dep("dep-shared", "*") - .dep("dep-normal2", "*") - .file( - "src/lib.rs", - " - pub fn foo() { - dep_shared::foo(); - dep_normal2::foo(); - } - ", - ) - .publish(); - Package::new("dep-build2", "0.0.1") - .file("src/lib.rs", "pub fn foo() {}") - .publish(); - Package::new("dep-build", "0.0.1") - .dep("dep-shared", "*") - .dep("dep-build2", "*") - .file( - "src/lib.rs", - " - pub fn foo() { - dep_shared::foo(); - dep_build2::foo(); - } - ", - ) - .publish(); - Package::new("dep-proc-macro2", "0.0.1") - .file("src/lib.rs", "pub fn foo() {}") - .publish(); - Package::new("dep-proc-macro", "0.0.1") - 
.proc_macro(true) - .dep("dep-shared", "*") - .dep("dep-proc-macro2", "*") - .file( - "src/lib.rs", - " - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro_attribute] - pub fn foo(_: TokenStream, a: TokenStream) -> TokenStream { - dep_shared::foo(); - dep_proc_macro2::foo(); - a - } - ", - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.0.0" - - [lib] - crate-type = ['cdylib', 'staticlib'] - - [dependencies] - dep-normal = "*" - dep-proc-macro = "*" - - [build-dependencies] - dep-build = "*" - - [profile.release] - lto = true - - # force build deps to share an opt-level with the rest of the - # graph so they only get built once. - [profile.release.build-override] - opt-level = 3 - "#, - ) - .file("build.rs", "fn main() { dep_build::foo() }") - .file( - "src/bin/foo-bin.rs", - "#[dep_proc_macro::foo] fn main() { dep_normal::foo() }", - ) - .file( - "src/lib.rs", - "#[dep_proc_macro::foo] pub fn foo() { dep_normal::foo() }", - ) - .build(); - p.cargo("build -v --release") - // normal deps and their transitive dependencies do not need object - // code, so they should have linker-plugin-lto specified - .with_stderr_contains( - "[..]`rustc[..]--crate-name dep_normal2 [..]-C linker-plugin-lto[..]`", - ) - .with_stderr_contains("[..]`rustc[..]--crate-name dep_normal [..]-C linker-plugin-lto[..]`") - // build dependencies and their transitive deps don't need any bitcode, - // so embedding should be turned off - .with_stderr_contains("[..]`rustc[..]--crate-name dep_build2 [..]-C embed-bitcode=no[..]`") - .with_stderr_contains("[..]`rustc[..]--crate-name dep_build [..]-C embed-bitcode=no[..]`") - .with_stderr_contains( - "[..]`rustc[..]--crate-name build_script_build [..]-C embed-bitcode=no[..]`", - ) - // proc macro deps are the same as build deps here - .with_stderr_contains( - "[..]`rustc[..]--crate-name dep_proc_macro2 [..]-C embed-bitcode=no[..]`", - ) - .with_stderr_contains( - "[..]`rustc[..]--crate-name dep_proc_macro [..]-C embed-bitcode=no[..]`", - ) - .with_stderr_contains( - "[..]`rustc[..]--crate-name foo_bin [..]--crate-type bin[..]-C lto[..]`", - ) - .with_stderr_contains( - "[..]`rustc[..]--crate-name test [..]--crate-type cdylib[..]-C lto[..]`", - ) - .with_stderr_contains("[..]`rustc[..]--crate-name dep_shared [..]`") - .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-C lto[..]") - .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-C linker-plugin-lto[..]") - .with_stderr_does_not_contain("[..]--crate-name dep_shared[..]-C embed-bitcode[..]") - .run(); -} - -#[cargo_test] -fn off_in_manifest_works() { - Package::new("bar", "0.0.1") - .file("src/lib.rs", "pub fn foo() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.0.0" - - [dependencies] - bar = "*" - - [profile.release] - lto = "off" - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file( - "src/main.rs", - "fn main() { - test::foo(); - bar::foo(); - }", - ) - .build(); - p.cargo("build -v --release") - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] [..] -[DOWNLOADED] [..] -[COMPILING] bar v0.0.1 -[RUNNING] `rustc --crate-name bar [..]--crate-type lib [..]-C lto=off -C embed-bitcode=no[..] -[COMPILING] test [..] -[RUNNING] `rustc --crate-name test [..]--crate-type lib [..]-C lto=off -C embed-bitcode=no[..] -[RUNNING] `rustc --crate-name test src/main.rs [..]--crate-type bin [..]-C lto=off[..] -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn between_builds() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.0.0" - - [profile.release] - lto = true - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file("src/main.rs", "fn main() { test::foo() }") - .build(); - p.cargo("build -v --release --lib") - .with_stderr( - "\ -[COMPILING] test [..] -[RUNNING] `rustc [..]--crate-type lib[..]-C linker-plugin-lto[..] -[FINISHED] [..] -", - ) - .run(); - p.cargo("build -v --release") - .with_stderr_contains( - "\ -[COMPILING] test [..] -[RUNNING] `rustc [..]--crate-type bin[..]-C lto[..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn test_all() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - - [profile.release] - lto = true - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("tests/a.rs", "") - .file("tests/b.rs", "") - .build(); - p.cargo("test --release -v") - .with_stderr_contains("[RUNNING] `rustc[..]--crate-name foo[..]-C lto[..]") - .run(); -} - -#[cargo_test] -fn test_all_and_bench() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - - [profile.release] - lto = true - [profile.bench] - lto = true - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("tests/a.rs", "") - .file("tests/b.rs", "") - .build(); - p.cargo("test --release -v") - .with_stderr_contains("[RUNNING] `rustc[..]--crate-name a[..]-C lto[..]") - .with_stderr_contains("[RUNNING] `rustc[..]--crate-name b[..]-C lto[..]") - .with_stderr_contains("[RUNNING] `rustc[..]--crate-name foo[..]-C lto[..]") - .run(); -} - -/// Basic setup: -/// -/// foo v0.0.0 -/// โ”œโ”€โ”€ bar v0.0.0 -/// โ”‚ โ”œโ”€โ”€ registry v0.0.1 -/// โ”‚ โ””โ”€โ”€ registry-shared v0.0.1 -/// โ””โ”€โ”€ registry-shared v0.0.1 -/// -/// Where `bar` will have the given crate types. -fn project_with_dep(crate_types: &str) -> Project { - Package::new("registry", "0.0.1") - .file("src/lib.rs", r#"pub fn foo() { println!("registry"); }"#) - .publish(); - Package::new("registry-shared", "0.0.1") - .file("src/lib.rs", r#"pub fn foo() { println!("shared"); }"#) - .publish(); - - project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.0" - - [workspace] - - [dependencies] - bar = { path = 'bar' } - registry-shared = "*" - - [profile.release] - lto = true - "#, - ) - .file( - "src/main.rs", - " - fn main() { - bar::foo(); - registry_shared::foo(); - } - ", - ) - .file( - "bar/Cargo.toml", - &format!( - r#" - [package] - name = "bar" - version = "0.0.0" - - [dependencies] - registry = "*" - registry-shared = "*" - - [lib] - crate-type = [{}] - "#, - crate_types - ), - ) - .file( - "bar/src/lib.rs", - r#" - pub fn foo() { - println!("bar"); - registry::foo(); - registry_shared::foo(); - } - "#, - ) - .file("tests/a.rs", "") - .file("bar/tests/b.rs", "") - .build() -} - -/// Helper for checking which LTO behavior is used for a specific crate. -/// -/// `krate_info` is extra compiler flags used to distinguish this if the same -/// crate name is being built multiple times. 
-fn verify_lto(output: &Output, krate: &str, krate_info: &str, expected_lto: Lto) { - let stderr = std::str::from_utf8(&output.stderr).unwrap(); - let mut matches = stderr.lines().filter(|line| { - line.contains("Running") - && line.contains(&format!("--crate-name {} ", krate)) - && line.contains(krate_info) - }); - let line = matches.next().unwrap_or_else(|| { - panic!( - "expected to find crate `{}` info: `{}`, not found in output:\n{}", - krate, krate_info, stderr - ); - }); - if let Some(line2) = matches.next() { - panic!( - "found multiple lines matching crate `{}` info: `{}`:\nline1:{}\nline2:{}\noutput:\n{}", - krate, krate_info, line, line2, stderr - ); - } - let actual_lto = if let Some(index) = line.find("-C lto=") { - let s = &line[index..]; - let end = s.find(' ').unwrap(); - let mode = &line[index..index + end]; - if mode == "off" { - Lto::Off - } else { - Lto::Run(Some(mode.into())) - } - } else if line.contains("-C lto") { - Lto::Run(None) - } else if line.contains("-C linker-plugin-lto") { - Lto::OnlyBitcode - } else if line.contains("-C embed-bitcode=no") { - Lto::OnlyObject - } else { - Lto::ObjectAndBitcode - }; - assert_eq!( - actual_lto, expected_lto, - "did not find expected LTO in line: {}", - line - ); -} - -#[cargo_test] -fn cdylib_and_rlib() { - let p = project_with_dep("'cdylib', 'rlib'"); - let output = p.cargo("build --release -v").exec_with_output().unwrap(); - // `registry` is ObjectAndBitcode because because it needs Object for the - // rlib, and Bitcode for the cdylib (which doesn't support LTO). - verify_lto( - &output, - "registry", - "--crate-type lib", - Lto::ObjectAndBitcode, - ); - // Same as `registry` - verify_lto( - &output, - "registry_shared", - "--crate-type lib", - Lto::ObjectAndBitcode, - ); - // Same as `registry` - verify_lto( - &output, - "bar", - "--crate-type cdylib --crate-type rlib", - Lto::ObjectAndBitcode, - ); - verify_lto(&output, "foo", "--crate-type bin", Lto::Run(None)); - p.cargo("test --release -v") - .with_stderr_unordered( - "\ -[FRESH] registry v0.0.1 -[FRESH] registry-shared v0.0.1 -[FRESH] bar v0.0.0 [..] -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name foo [..]-C lto [..]--test[..] -[RUNNING] `rustc --crate-name a [..]-C lto [..]--test[..] -[FINISHED] [..] -[RUNNING] [..] -[RUNNING] [..] -", - ) - .run(); - p.cargo("build --release -v --manifest-path bar/Cargo.toml") - .with_stderr_unordered( - "\ -[FRESH] registry-shared v0.0.1 -[FRESH] registry v0.0.1 -[FRESH] bar v0.0.0 [..] -[FINISHED] [..] -", - ) - .run(); - p.cargo("test --release -v --manifest-path bar/Cargo.toml") - .with_stderr_unordered( - "\ -[FRESH] registry-shared v0.0.1 -[FRESH] registry v0.0.1 -[COMPILING] bar [..] -[RUNNING] `rustc --crate-name bar [..]-C lto[..]--test[..] -[RUNNING] `rustc --crate-name b [..]-C lto[..]--test[..] -[FINISHED] [..] -[RUNNING] [..]target/release/deps/bar-[..] -[RUNNING] [..]target/release/deps/b-[..] -[DOCTEST] bar -[RUNNING] `rustdoc --crate-type cdylib --crate-type rlib --crate-name bar --test [..]-C lto[..] -", - ) - .run(); -} - -#[cargo_test] -fn dylib() { - let p = project_with_dep("'dylib'"); - let output = p.cargo("build --release -v").exec_with_output().unwrap(); - // `registry` is OnlyObject because rustc doesn't support LTO with dylibs. - verify_lto(&output, "registry", "--crate-type lib", Lto::OnlyObject); - // `registry_shared` is both because it is needed by both bar (Object) and - // foo (Bitcode for LTO). 
- verify_lto( - &output, - "registry_shared", - "--crate-type lib", - Lto::ObjectAndBitcode, - ); - // `bar` is OnlyObject because rustc doesn't support LTO with dylibs. - verify_lto(&output, "bar", "--crate-type dylib", Lto::OnlyObject); - // `foo` is LTO because it is a binary, and the profile specifies `lto=true`. - verify_lto(&output, "foo", "--crate-type bin", Lto::Run(None)); - // `cargo test` should not rebuild dependencies. It builds the test - // executables with `lto=true` because the tests are built with the - // `--release` flag. - p.cargo("test --release -v") - .with_stderr_unordered( - "\ -[FRESH] registry v0.0.1 -[FRESH] registry-shared v0.0.1 -[FRESH] bar v0.0.0 [..] -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name foo [..]-C lto [..]--test[..] -[RUNNING] `rustc --crate-name a [..]-C lto [..]--test[..] -[FINISHED] [..] -[RUNNING] [..] -[RUNNING] [..] -", - ) - .run(); - // Building just `bar` causes `registry-shared` to get rebuilt because it - // switches to OnlyObject because it is now only being used with a dylib - // which does not support LTO. - // - // `bar` gets rebuilt because `registry_shared` got rebuilt. - p.cargo("build --release -v --manifest-path bar/Cargo.toml") - .with_stderr_unordered( - "\ -[COMPILING] registry-shared v0.0.1 -[FRESH] registry v0.0.1 -[RUNNING] `rustc --crate-name registry_shared [..]-C embed-bitcode=no[..] -[COMPILING] bar [..] -[RUNNING] `rustc --crate-name bar [..]--crate-type dylib [..]-C embed-bitcode=no[..] -[FINISHED] [..] -", - ) - .run(); - // Testing just `bar` causes `registry` to get rebuilt because it switches - // to needing both Object (for the `bar` dylib) and Bitcode (for the test - // built with LTO). - // - // `bar` the dylib gets rebuilt because `registry` got rebuilt. - p.cargo("test --release -v --manifest-path bar/Cargo.toml") - .with_stderr_unordered( - "\ -[FRESH] registry-shared v0.0.1 -[COMPILING] registry v0.0.1 -[RUNNING] `rustc --crate-name registry [..] -[COMPILING] bar [..] -[RUNNING] `rustc --crate-name bar [..]--crate-type dylib [..]-C embed-bitcode=no[..] -[RUNNING] `rustc --crate-name bar [..]-C lto [..]--test[..] -[RUNNING] `rustc --crate-name b [..]-C lto [..]--test[..] -[FINISHED] [..] -[RUNNING] [..] -[RUNNING] [..] -", - ) - .run(); -} - -#[cargo_test] -fn test_profile() { - Package::new("bar", "0.0.1") - .file("src/lib.rs", "pub fn foo() -> i32 { 123 } ") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [profile.test] - lto = 'thin' - - [dependencies] - bar = "*" - "#, - ) - .file( - "src/lib.rs", - r#" - #[test] - fn t1() { - assert_eq!(123, bar::foo()); - } - "#, - ) - .build(); - - p.cargo("test -v") - // unordered because the two `foo` builds start in parallel - .with_stderr_unordered("\ -[UPDATING] [..] -[DOWNLOADING] [..] -[DOWNLOADED] [..] -[COMPILING] bar v0.0.1 -[RUNNING] `rustc --crate-name bar [..]crate-type lib[..] -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name foo [..]--crate-type lib --emit=dep-info,metadata,link -C linker-plugin-lto[..] -[RUNNING] `rustc --crate-name foo [..]--emit=dep-info,link -C lto=thin [..]--test[..] -[FINISHED] [..] -[RUNNING] [..] -[DOCTEST] foo -[RUNNING] `rustdoc [..] -") - .run(); -} - -#[cargo_test] -fn doctest() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [profile.release] - lto = true - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file( - "src/lib.rs", - r#" - /// Foo! 
- /// - /// ``` - /// foo::foo(); - /// ``` - pub fn foo() { bar::bar(); } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file( - "bar/src/lib.rs", - r#" - pub fn bar() { println!("hi!"); } - "#, - ) - .build(); - - p.cargo("test --doc --release -v") - .with_stderr_contains("[..]`rustc --crate-name bar[..]-C linker-plugin-lto[..]") - .with_stderr_contains("[..]`rustc --crate-name foo[..]-C linker-plugin-lto[..]") - // embed-bitcode should be harmless here - .with_stderr_contains("[..]`rustdoc [..]-C lto[..]") - .run(); - - // Try with bench profile. - p.cargo("test --doc --release -v") - .env("CARGO_PROFILE_BENCH_LTO", "true") - .with_stderr_unordered( - "\ -[FRESH] bar v0.1.0 [..] -[FRESH] foo v0.1.0 [..] -[FINISHED] release [..] -[DOCTEST] foo -[RUNNING] `rustdoc [..]-C lto[..] -", - ) - .run(); -} - -#[cargo_test] -fn dylib_rlib_bin() { - // dylib+rlib linked with a binary - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [lib] - crate-type = ["dylib", "rlib"] - - [profile.release] - lto = true - "#, - ) - .file("src/lib.rs", "pub fn foo() { println!(\"hi!\"); }") - .file("src/bin/ferret.rs", "fn main() { foo::foo(); }") - .build(); - - let output = p.cargo("build --release -v").exec_with_output().unwrap(); - verify_lto( - &output, - "foo", - "--crate-type dylib --crate-type rlib", - Lto::ObjectAndBitcode, - ); - verify_lto(&output, "ferret", "--crate-type bin", Lto::Run(None)); -} - -#[cargo_test] -fn fresh_swapping_commands() { - // In some rare cases, different commands end up building dependencies - // with different LTO settings. This checks that it doesn't cause the - // cache to thrash in that scenario. - Package::new("bar", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - - [profile.release] - lto = true - "#, - ) - .file("src/lib.rs", "pub fn foo() { println!(\"hi!\"); }") - .build(); - - p.cargo("build --release -v") - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v1.0.0 [..] -[COMPILING] bar v1.0.0 -[RUNNING] `rustc --crate-name bar [..]-C linker-plugin-lto[..] -[COMPILING] foo v0.1.0 [..] -[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C linker-plugin-lto[..] -[FINISHED] [..] -", - ) - .run(); - p.cargo("test --release -v") - .with_stderr_unordered( - "\ -[FRESH] bar v1.0.0 -[COMPILING] foo v0.1.0 [..] -[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C lto[..]--test[..] -[FINISHED] [..] -[RUNNING] `[..]/foo[..]` -[DOCTEST] foo -[RUNNING] `rustdoc [..]-C lto[..] -", - ) - .run(); - - p.cargo("build --release -v") - .with_stderr( - "\ -[FRESH] bar v1.0.0 -[FRESH] foo [..] -[FINISHED] [..] -", - ) - .run(); - p.cargo("test --release -v --no-run -v") - .with_stderr( - "\ -[FRESH] bar v1.0.0 -[FRESH] foo [..] -[FINISHED] [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/main.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/main.rs deleted file mode 100644 index 8c30bf929..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/main.rs +++ /dev/null @@ -1,138 +0,0 @@ -// See src/cargo/lib.rs for notes on these lint settings. 
-#![warn(rust_2018_idioms)] -#![allow(clippy::all)] -#![warn(clippy::needless_borrow)] -#![warn(clippy::redundant_clone)] -#![cfg_attr(feature = "deny-warnings", deny(warnings))] - -#[macro_use] -extern crate cargo_test_macro; - -mod advanced_env; -mod alt_registry; -mod bad_config; -mod bad_manifest_path; -mod bench; -mod binary_name; -mod build; -mod build_plan; -mod build_script; -mod build_script_env; -mod build_script_extra_link_arg; -mod cache_messages; -mod cargo_alias_config; -mod cargo_command; -mod cargo_config; -mod cargo_env_config; -mod cargo_features; -mod cargo_targets; -mod cfg; -mod check; -mod clean; -mod collisions; -mod concurrent; -mod config; -mod config_cli; -mod config_include; -mod corrupt_git; -mod credential_process; -mod cross_compile; -mod cross_publish; -mod custom_target; -mod death; -mod dep_info; -mod directory; -mod doc; -mod edition; -mod error; -mod features; -mod features2; -mod features_namespaced; -mod fetch; -mod fix; -mod freshness; -mod future_incompat_report; -mod generate_lockfile; -mod git; -mod git_auth; -mod git_gc; -mod glob_targets; -mod help; -mod init; -mod install; -mod install_upgrade; -mod jobserver; -mod list_availables; -mod local_registry; -mod locate_project; -mod lockfile_compat; -mod login; -mod logout; -mod lto; -mod member_discovery; -mod member_errors; -mod message_format; -mod messages; -mod metabuild; -mod metadata; -mod minimal_versions; -mod multitarget; -mod net_config; -mod new; -mod offline; -mod old_cargos; -mod out_dir; -mod owner; -mod package; -mod package_features; -mod patch; -mod path; -mod paths; -mod pkgid; -mod plugins; -mod proc_macro; -mod profile_config; -mod profile_custom; -mod profile_overrides; -mod profile_targets; -mod profiles; -mod progress; -mod pub_priv; -mod publish; -mod publish_lockfile; -mod read_manifest; -mod registry; -mod rename_deps; -mod replace; -mod required_features; -mod run; -mod rust_version; -mod rustc; -mod rustc_info_cache; -mod rustdoc; -mod rustdoc_extern_html; -mod rustdocflags; -mod rustflags; -mod search; -mod shell_quoting; -mod standard_lib; -mod test; -mod timings; -mod tool_paths; -mod tree; -mod tree_graph_features; -mod unit_graph; -mod update; -mod vendor; -mod verify_project; -mod version; -mod warn_on_failure; -mod weak_dep_features; -mod workspaces; -mod yank; - -#[cargo_test] -fn aaa_trigger_cross_compile_disabled_check() { - // This triggers the cross compile disabled check to run ASAP, see #5141 - cargo_test_support::cross_compile::disabled(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/member_discovery.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/member_discovery.rs deleted file mode 100644 index e882add3e..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/member_discovery.rs +++ /dev/null @@ -1,44 +0,0 @@ -//! Tests for workspace member discovery. - -use cargo::core::{Shell, Workspace}; -use cargo::util::config::Config; - -use cargo_test_support::install::cargo_home; -use cargo_test_support::project; -use cargo_test_support::registry; - -/// Tests exclusion of non-directory files from workspace member discovery using glob `*`. 
-#[cargo_test] -fn bad_file_member_exclusion() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = [ "crates/*" ] - "#, - ) - .file("crates/.DS_Store", "PLACEHOLDER") - .file( - "crates/bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - "#, - ) - .file("crates/bar/src/main.rs", "fn main() {}") - .build(); - - // Prevent this test from accessing the network by setting up .cargo/config. - registry::init(); - let config = Config::new( - Shell::from_write(Box::new(Vec::new())), - cargo_home(), - cargo_home(), - ); - let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap(); - assert_eq!(ws.members().count(), 1); - assert_eq!(ws.members().next().unwrap().name(), "bar"); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/member_errors.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/member_errors.rs deleted file mode 100644 index 10533b292..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/member_errors.rs +++ /dev/null @@ -1,164 +0,0 @@ -//! Tests for workspace member errors. - -use cargo::core::resolver::ResolveError; -use cargo::core::{compiler::CompileMode, Shell, Workspace}; -use cargo::ops::{self, CompileOptions}; -use cargo::util::{config::Config, errors::ManifestError}; - -use cargo_test_support::install::cargo_home; -use cargo_test_support::project; -use cargo_test_support::registry; - -/// Tests inclusion of a `ManifestError` pointing to a member manifest -/// when that manifest fails to deserialize. -#[cargo_test] -fn toml_deserialize_manifest_error() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "bar" } - - [workspace] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - foobar == "0.55" - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .build(); - - let root_manifest_path = p.root().join("Cargo.toml"); - let member_manifest_path = p.root().join("bar").join("Cargo.toml"); - - let error = Workspace::new(&root_manifest_path, &Config::default().unwrap()).unwrap_err(); - eprintln!("{:?}", error); - - let manifest_err: &ManifestError = error.downcast_ref().expect("Not a ManifestError"); - assert_eq!(manifest_err.manifest_path(), &root_manifest_path); - - let causes: Vec<_> = manifest_err.manifest_causes().collect(); - assert_eq!(causes.len(), 1, "{:?}", causes); - assert_eq!(causes[0].manifest_path(), &member_manifest_path); -} - -/// Tests inclusion of a `ManifestError` pointing to a member manifest -/// when that manifest has an invalid dependency path. 
-#[cargo_test] -fn member_manifest_path_io_error() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "bar" } - - [workspace] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - foobar = { path = "nosuch" } - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .build(); - - let root_manifest_path = p.root().join("Cargo.toml"); - let member_manifest_path = p.root().join("bar").join("Cargo.toml"); - let missing_manifest_path = p.root().join("bar").join("nosuch").join("Cargo.toml"); - - let error = Workspace::new(&root_manifest_path, &Config::default().unwrap()).unwrap_err(); - eprintln!("{:?}", error); - - let manifest_err: &ManifestError = error.downcast_ref().expect("Not a ManifestError"); - assert_eq!(manifest_err.manifest_path(), &root_manifest_path); - - let causes: Vec<_> = manifest_err.manifest_causes().collect(); - assert_eq!(causes.len(), 2, "{:?}", causes); - assert_eq!(causes[0].manifest_path(), &member_manifest_path); - assert_eq!(causes[1].manifest_path(), &missing_manifest_path); -} - -/// Tests dependency version errors provide which package failed via a `ResolveError`. -#[cargo_test] -fn member_manifest_version_error() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "bar" } - - [workspace] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - i-dont-exist = "0.55" - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .build(); - - // Prevent this test from accessing the network by setting up .cargo/config. - registry::init(); - let config = Config::new( - Shell::from_write(Box::new(Vec::new())), - cargo_home(), - cargo_home(), - ); - let ws = Workspace::new(&p.root().join("Cargo.toml"), &config).unwrap(); - let compile_options = CompileOptions::new(&config, CompileMode::Build).unwrap(); - let member_bar = ws.members().find(|m| &*m.name() == "bar").unwrap(); - - let error = ops::compile(&ws, &compile_options).map(|_| ()).unwrap_err(); - eprintln!("{:?}", error); - - let resolve_err: &ResolveError = error.downcast_ref().expect("Not a ResolveError"); - let package_path = resolve_err.package_path(); - assert_eq!(package_path.len(), 1, "package_path: {:?}", package_path); - assert_eq!(package_path[0], member_bar.package_id()); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/message_format.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/message_format.rs deleted file mode 100644 index 5a79056cf..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/message_format.rs +++ /dev/null @@ -1,133 +0,0 @@ -//! Tests for --message-format flag. 
- -use cargo_test_support::{basic_lib_manifest, basic_manifest, project}; - -#[cargo_test] -fn cannot_specify_two() { - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - let formats = ["human", "json", "short"]; - - let two_kinds = "error: cannot specify two kinds of `message-format` arguments\n"; - for a in formats.iter() { - for b in formats.iter() { - p.cargo(&format!("build --message-format {},{}", a, b)) - .with_status(101) - .with_stderr(two_kinds) - .run(); - } - } -} - -#[cargo_test] -fn double_json_works() { - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --message-format json,json-render-diagnostics") - .run(); - p.cargo("build --message-format json,json-diagnostic-short") - .run(); - p.cargo("build --message-format json,json-diagnostic-rendered-ansi") - .run(); - p.cargo("build --message-format json --message-format json-diagnostic-rendered-ansi") - .run(); - p.cargo("build --message-format json-diagnostic-rendered-ansi") - .run(); - p.cargo("build --message-format json-diagnostic-short,json-diagnostic-rendered-ansi") - .run(); -} - -#[cargo_test] -fn cargo_renders() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = 'foo' - version = '0.1.0' - - [dependencies] - bar = { path = 'bar' } - "#, - ) - .file("src/main.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build --message-format json-render-diagnostics") - .with_status(101) - .with_stdout( - "{\"reason\":\"compiler-artifact\",[..]\n\ - {\"reason\":\"build-finished\",\"success\":false}", - ) - .with_stderr_contains( - "\ -[COMPILING] bar [..] -[COMPILING] foo [..] -error[..]`main`[..] -", - ) - .run(); -} - -#[cargo_test] -fn cargo_renders_short() { - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/main.rs", "") - .build(); - - p.cargo("build --message-format json-render-diagnostics,json-diagnostic-short") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo [..] -error[..]`main`[..] -", - ) - .with_stderr_does_not_contain("note:") - .run(); -} - -#[cargo_test] -fn cargo_renders_ansi() { - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/main.rs", "") - .build(); - - p.cargo("build --message-format json-diagnostic-rendered-ansi") - .with_status(101) - .with_stdout_contains("[..]\\u001b[38;5;9merror[..]") - .run(); -} - -#[cargo_test] -fn cargo_renders_doctests() { - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file( - "src/lib.rs", - "\ - /// ```rust - /// bar() - /// ``` - pub fn bar() {} - ", - ) - .build(); - - p.cargo("test --doc --message-format short") - .with_status(101) - .with_stdout_contains("src/lib.rs:2:1: error[E0425]:[..]") - .with_stdout_contains("[..]src/lib.rs - bar (line 1)[..]") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/messages.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/messages.rs deleted file mode 100644 index 0c55e15fb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/messages.rs +++ /dev/null @@ -1,142 +0,0 @@ -//! General tests specifically about diagnostics and other messages. -//! -//! Tests for message caching can be found in `cache_messages`. 
-
-use cargo_test_support::{process, project, Project};
-use cargo_util::ProcessError;
-
-/// Captures the actual diagnostics displayed by rustc. This is done to avoid
-/// relying on the exact message formatting in rustc.
-pub fn raw_rustc_output(project: &Project, path: &str, extra: &[&str]) -> String {
-    let mut proc = process("rustc");
-    if cfg!(windows) {
-        // Sanitize in case the caller wants to do direct string comparison with Cargo's output.
-        proc.arg(path.replace('/', "\\"));
-    } else {
-        proc.arg(path);
-    }
-    let rustc_output = match proc
-        .arg("--crate-type=lib")
-        .args(extra)
-        .cwd(project.root())
-        .exec_with_output()
-    {
-        Ok(output) => output.stderr,
-        Err(e) => e.downcast::<ProcessError>().unwrap().stderr.unwrap(),
-    };
-    // Do a little dance to remove rustc's "warnings emitted" message and the subsequent newline.
-    let stderr = std::str::from_utf8(&rustc_output).expect("utf8");
-    let mut lines = stderr.lines();
-    let mut result = String::new();
-    while let Some(line) = lines.next() {
-        if line.contains("warning emitted")
-            || line.contains("warnings emitted")
-            || line.contains("aborting due to")
-        {
-            // Eat blank line.
-            match lines.next() {
-                None | Some("") => continue,
-                Some(s) => panic!("unexpected str {}", s),
-            }
-        }
-        result.push_str(line);
-        result.push('\n');
-    }
-    result
-}
-
-#[cargo_test]
-fn deduplicate_messages_basic() {
-    let p = project()
-        .file(
-            "src/lib.rs",
-            r#"
-                pub fn foo() {
-                    let x = 1;
-                }
-            "#,
-        )
-        .build();
-    let rustc_message = raw_rustc_output(&p, "src/lib.rs", &[]);
-    let expected_output = format!(
-        "{}\
-warning: `foo` (lib) generated 1 warning
-warning: `foo` (lib test) generated 1 warning (1 duplicate)
-[FINISHED] [..]
-",
-        rustc_message
-    );
-    p.cargo("test --no-run -j1")
-        .with_stderr(&format!("[COMPILING] foo [..]\n{}", expected_output))
-        .run();
-    // Run again, to check for caching behavior.
-    p.cargo("test --no-run -j1")
-        .with_stderr(expected_output)
-        .run();
-}
-
-#[cargo_test]
-fn deduplicate_messages_mismatched_warnings() {
-    // One execution prints 1 warning, the other prints 2 where there is an overlap.
-    let p = project()
-        .file(
-            "src/lib.rs",
-            r#"
-                pub fn foo() {
-                    let x = 1;
-                }
-
-                #[test]
-                fn t1() {
-                    let MY_VALUE = 1;
-                    assert_eq!(MY_VALUE, 1);
-                }
-            "#,
-        )
-        .build();
-    let lib_output = raw_rustc_output(&p, "src/lib.rs", &[]);
-    let mut lib_test_output = raw_rustc_output(&p, "src/lib.rs", &["--test"]);
-    // Remove the duplicate warning.
-    let start = lib_test_output.find(&lib_output).expect("same warning");
-    lib_test_output.replace_range(start..start + lib_output.len(), "");
-    let expected_output = format!(
-        "\
-{}\
-warning: `foo` (lib) generated 1 warning
-{}\
-warning: `foo` (lib test) generated 2 warnings (1 duplicate)
-[FINISHED] [..]
-",
-        lib_output, lib_test_output
-    );
-    p.cargo("test --no-run -j1")
-        .with_stderr(&format!("[COMPILING] foo v0.0.1 [..]\n{}", expected_output))
-        .run();
-    // Run again, to check for caching behavior.
-    p.cargo("test --no-run -j1")
-        .with_stderr(expected_output)
-        .run();
-}
-
-#[cargo_test]
-fn deduplicate_errors() {
-    let p = project()
-        .file(
-            "src/lib.rs",
-            r#"
-                this should not compile
-            "#,
-        )
-        .build();
-    let rustc_message = raw_rustc_output(&p, "src/lib.rs", &[]);
-    p.cargo("test -j1")
-        .with_status(101)
-        .with_stderr(&format!(
-            "\
-[COMPILING] foo v0.0.1 [..]
-{}error: could not compile `foo` due to previous error -", - rustc_message - )) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/metabuild.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/metabuild.rs deleted file mode 100644 index d37d09b63..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/metabuild.rs +++ /dev/null @@ -1,769 +0,0 @@ -//! Tests for the metabuild feature (declarative build scripts). - -use cargo_test_support::{ - basic_lib_manifest, basic_manifest, is_coarse_mtime, project, registry::Package, rustc_host, - Project, -}; - -use std::str; - -#[cargo_test] -fn metabuild_gated() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - metabuild = ["mb"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - feature `metabuild` is required - - The package requires the Cargo feature called `metabuild`, \ - but that feature is not stabilized in this version of Cargo (1.[..]). - Consider adding `cargo-features = [\"metabuild\"]` to the top of Cargo.toml \ - (above the [package] table) to tell Cargo you are opting in to use this unstable feature. - See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#metabuild \ - for more information about the status of this feature. -", - ) - .run(); -} - -fn basic_project() -> Project { - project() - .file( - "Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "foo" - version = "0.0.1" - metabuild = ["mb", "mb-other"] - - [build-dependencies] - mb = {path="mb"} - mb-other = {path="mb-other"} - "#, - ) - .file("src/lib.rs", "") - .file("mb/Cargo.toml", &basic_lib_manifest("mb")) - .file( - "mb/src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb"); }"#, - ) - .file( - "mb-other/Cargo.toml", - r#" - [package] - name = "mb-other" - version = "0.0.1" - "#, - ) - .file( - "mb-other/src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb-other"); }"#, - ) - .build() -} - -#[cargo_test] -fn metabuild_basic() { - let p = basic_project(); - p.cargo("build -vv") - .masquerade_as_nightly_cargo() - .with_stdout_contains("[foo 0.0.1] Hello mb") - .with_stdout_contains("[foo 0.0.1] Hello mb-other") - .run(); -} - -#[cargo_test] -fn metabuild_error_both() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "foo" - version = "0.0.1" - metabuild = "mb" - - [build-dependencies] - mb = {path="mb"} - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", r#"fn main() {}"#) - .file("mb/Cargo.toml", &basic_lib_manifest("mb")) - .file( - "mb/src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb"); }"#, - ) - .build(); - - p.cargo("build -vv") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains( - "\ -error: failed to parse manifest at [..] - -Caused by: - cannot specify both `metabuild` and `build` -", - ) - .run(); -} - -#[cargo_test] -fn metabuild_missing_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "foo" - version = "0.0.1" - metabuild = "mb" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -vv") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains( - "\ -error: failed to parse manifest at [..] 
- -Caused by: - metabuild package `mb` must be specified in `build-dependencies`", - ) - .run(); -} - -#[cargo_test] -fn metabuild_optional_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "foo" - version = "0.0.1" - metabuild = "mb" - - [build-dependencies] - mb = {path="mb", optional=true} - "#, - ) - .file("src/lib.rs", "") - .file("mb/Cargo.toml", &basic_lib_manifest("mb")) - .file( - "mb/src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb"); }"#, - ) - .build(); - - p.cargo("build -vv") - .masquerade_as_nightly_cargo() - .with_stdout_does_not_contain("[foo 0.0.1] Hello mb") - .run(); - - p.cargo("build -vv --features mb") - .masquerade_as_nightly_cargo() - .with_stdout_contains("[foo 0.0.1] Hello mb") - .run(); -} - -#[cargo_test] -fn metabuild_lib_name() { - // Test when setting `name` on [lib]. - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "foo" - version = "0.0.1" - metabuild = "mb" - - [build-dependencies] - mb = {path="mb"} - "#, - ) - .file("src/lib.rs", "") - .file( - "mb/Cargo.toml", - r#" - [package] - name = "mb" - version = "0.0.1" - [lib] - name = "other" - "#, - ) - .file( - "mb/src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb"); }"#, - ) - .build(); - - p.cargo("build -vv") - .masquerade_as_nightly_cargo() - .with_stdout_contains("[foo 0.0.1] Hello mb") - .run(); -} - -#[cargo_test] -fn metabuild_fresh() { - if is_coarse_mtime() { - // This test doesn't work on coarse mtimes very well. Because the - // metabuild script is created at build time, its mtime is almost - // always equal to the mtime of the output. The second call to `build` - // will then think it needs to be rebuilt when it should be fresh. - return; - } - - // Check that rebuild is fresh. - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "foo" - version = "0.0.1" - metabuild = "mb" - - [build-dependencies] - mb = {path="mb"} - "#, - ) - .file("src/lib.rs", "") - .file("mb/Cargo.toml", &basic_lib_manifest("mb")) - .file( - "mb/src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb"); }"#, - ) - .build(); - - p.cargo("build -vv") - .masquerade_as_nightly_cargo() - .with_stdout_contains("[foo 0.0.1] Hello mb") - .run(); - - p.cargo("build -vv") - .masquerade_as_nightly_cargo() - .with_stdout_does_not_contain("[foo 0.0.1] Hello mb") - .with_stderr( - "\ -[FRESH] mb [..] -[FRESH] foo [..] -[FINISHED] dev [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn metabuild_links() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "foo" - version = "0.0.1" - links = "cat" - metabuild = "mb" - - [build-dependencies] - mb = {path="mb"} - "#, - ) - .file("src/lib.rs", "") - .file("mb/Cargo.toml", &basic_lib_manifest("mb")) - .file( - "mb/src/lib.rs", - r#" - pub fn metabuild() { - assert_eq!(std::env::var("CARGO_MANIFEST_LINKS"), - Ok("cat".to_string())); - println!("Hello mb"); - } - "#, - ) - .build(); - - p.cargo("build -vv") - .masquerade_as_nightly_cargo() - .with_stdout_contains("[foo 0.0.1] Hello mb") - .run(); -} - -#[cargo_test] -fn metabuild_override() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "foo" - version = "0.0.1" - links = "cat" - metabuild = "mb" - - [build-dependencies] - mb = {path="mb"} - "#, - ) - .file("src/lib.rs", "") - .file("mb/Cargo.toml", &basic_lib_manifest("mb")) - .file( - "mb/src/lib.rs", - r#"pub fn metabuild() { panic!("should not run"); }"#, - ) - .file( - ".cargo/config", - &format!( - r#" - [target.{}.cat] - rustc-link-lib = ["a"] - "#, - rustc_host() - ), - ) - .build(); - - p.cargo("build -vv").masquerade_as_nightly_cargo().run(); -} - -#[cargo_test] -fn metabuild_workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["member1", "member2"] - "#, - ) - .file( - "member1/Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "member1" - version = "0.0.1" - metabuild = ["mb1", "mb2"] - - [build-dependencies] - mb1 = {path="../../mb1"} - mb2 = {path="../../mb2"} - "#, - ) - .file("member1/src/lib.rs", "") - .file( - "member2/Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "member2" - version = "0.0.1" - metabuild = ["mb1"] - - [build-dependencies] - mb1 = {path="../../mb1"} - "#, - ) - .file("member2/src/lib.rs", "") - .build(); - - project() - .at("mb1") - .file("Cargo.toml", &basic_lib_manifest("mb1")) - .file( - "src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb1 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, - ) - .build(); - - project() - .at("mb2") - .file("Cargo.toml", &basic_lib_manifest("mb2")) - .file( - "src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb2 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, - ) - .build(); - - p.cargo("build -vv --workspace") - .masquerade_as_nightly_cargo() - .with_stdout_contains("[member1 0.0.1] Hello mb1 [..]member1") - .with_stdout_contains("[member1 0.0.1] Hello mb2 [..]member1") - .with_stdout_contains("[member2 0.0.1] Hello mb1 [..]member2") - .with_stdout_does_not_contain("[member2 0.0.1] Hello mb2 [..]member2") - .run(); -} - -#[cargo_test] -fn metabuild_metadata() { - // The metabuild Target is filtered out of the `metadata` results. 
- let p = basic_project(); - - let meta = p - .cargo("metadata --format-version=1") - .masquerade_as_nightly_cargo() - .run_json(); - let mb_info: Vec<&str> = meta["packages"] - .as_array() - .unwrap() - .iter() - .find(|p| p["name"].as_str().unwrap() == "foo") - .unwrap()["metabuild"] - .as_array() - .unwrap() - .iter() - .map(|s| s.as_str().unwrap()) - .collect(); - assert_eq!(mb_info, ["mb", "mb-other"]); -} - -#[cargo_test] -fn metabuild_build_plan() { - let p = basic_project(); - - p.cargo("build --build-plan -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_json( - r#" - { - "invocations": [ - { - "package_name": "mb", - "package_version": "0.5.0", - "target_kind": ["lib"], - "compile_mode": "build", - "kind": null, - "deps": [], - "outputs": [ - "[..]/target/debug/deps/libmb-[..].rlib", - "[..]/target/debug/deps/libmb-[..].rmeta" - ], - "links": {}, - "program": "rustc", - "args": "{...}", - "env": "{...}", - "cwd": "[..]" - }, - { - "package_name": "mb-other", - "package_version": "0.0.1", - "target_kind": ["lib"], - "compile_mode": "build", - "kind": null, - "deps": [], - "outputs": [ - "[..]/target/debug/deps/libmb_other-[..].rlib", - "[..]/target/debug/deps/libmb_other-[..].rmeta" - ], - "links": {}, - "program": "rustc", - "args": "{...}", - "env": "{...}", - "cwd": "[..]" - }, - { - "package_name": "foo", - "package_version": "0.0.1", - "target_kind": ["custom-build"], - "compile_mode": "build", - "kind": null, - "deps": [0, 1], - "outputs": "{...}", - "links": "{...}", - "program": "rustc", - "args": "{...}", - "env": "{...}", - "cwd": "[..]" - }, - { - "package_name": "foo", - "package_version": "0.0.1", - "target_kind": ["custom-build"], - "compile_mode": "run-custom-build", - "kind": null, - "deps": [2], - "outputs": [], - "links": {}, - "program": "[..]/foo/target/debug/build/foo-[..]/metabuild-foo", - "args": [], - "env": "{...}", - "cwd": "[..]" - }, - { - "package_name": "foo", - "package_version": "0.0.1", - "target_kind": ["lib"], - "compile_mode": "build", - "kind": null, - "deps": [3], - "outputs": [ - "[..]/foo/target/debug/deps/libfoo-[..].rlib", - "[..]/foo/target/debug/deps/libfoo-[..].rmeta" - ], - "links": "{...}", - "program": "rustc", - "args": "{...}", - "env": "{...}", - "cwd": "[..]" - } - ], - "inputs": [ - "[..]/foo/Cargo.toml", - "[..]/foo/mb/Cargo.toml", - "[..]/foo/mb-other/Cargo.toml" - ] - } - "#, - ) - .run(); - - assert_eq!(p.glob("target/.metabuild/metabuild-foo-*.rs").count(), 1); -} - -#[cargo_test] -fn metabuild_two_versions() { - // Two versions of a metabuild dep with the same name. 
- let p = project() - .at("ws") - .file( - "Cargo.toml", - r#" - [workspace] - members = ["member1", "member2"] - "#, - ) - .file( - "member1/Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "member1" - version = "0.0.1" - metabuild = ["mb"] - - [build-dependencies] - mb = {path="../../mb1"} - "#, - ) - .file("member1/src/lib.rs", "") - .file( - "member2/Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "member2" - version = "0.0.1" - metabuild = ["mb"] - - [build-dependencies] - mb = {path="../../mb2"} - "#, - ) - .file("member2/src/lib.rs", "") - .build(); - - project().at("mb1") - .file("Cargo.toml", r#" - [package] - name = "mb" - version = "0.0.1" - "#) - .file( - "src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb1 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, - ) - .build(); - - project().at("mb2") - .file("Cargo.toml", r#" - [package] - name = "mb" - version = "0.0.2" - "#) - .file( - "src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb2 {}", std::env::var("CARGO_MANIFEST_DIR").unwrap()); }"#, - ) - .build(); - - p.cargo("build -vv --workspace") - .masquerade_as_nightly_cargo() - .with_stdout_contains("[member1 0.0.1] Hello mb1 [..]member1") - .with_stdout_contains("[member2 0.0.1] Hello mb2 [..]member2") - .run(); - - assert_eq!( - p.glob("target/.metabuild/metabuild-member?-*.rs").count(), - 2 - ); -} - -#[cargo_test] -fn metabuild_external_dependency() { - Package::new("mb", "1.0.0") - .file("Cargo.toml", &basic_manifest("mb", "1.0.0")) - .file( - "src/lib.rs", - r#"pub fn metabuild() { println!("Hello mb"); }"#, - ) - .publish(); - Package::new("dep", "1.0.0") - .file( - "Cargo.toml", - r#" - cargo-features = ["metabuild"] - [package] - name = "dep" - version = "1.0.0" - metabuild = ["mb"] - - [build-dependencies] - mb = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build_dep("mb", "1.0.0") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - [dependencies] - dep = "1.0" - "#, - ) - .file("src/lib.rs", "extern crate dep;") - .build(); - - p.cargo("build -vv") - .masquerade_as_nightly_cargo() - .with_stdout_contains("[dep 1.0.0] Hello mb") - .run(); - - assert_eq!(p.glob("target/.metabuild/metabuild-dep-*.rs").count(), 1); -} - -#[cargo_test] -fn metabuild_json_artifact() { - let p = basic_project(); - p.cargo("build --message-format=json") - .masquerade_as_nightly_cargo() - .with_json_contains_unordered( - r#" - { - "executable": null, - "features": [], - "filenames": "{...}", - "fresh": false, - "package_id": "foo [..]", - "manifest_path": "[..]", - "profile": "{...}", - "reason": "compiler-artifact", - "target": { - "crate_types": [ - "bin" - ], - "doc": false, - "doctest": false, - "edition": "2018", - "kind": [ - "custom-build" - ], - "name": "metabuild-foo", - "src_path": "[..]/foo/target/.metabuild/metabuild-foo-[..].rs", - "test": false - } - } - - { - "cfgs": [], - "env": [], - "linked_libs": [], - "linked_paths": [], - "package_id": "foo [..]", - "out_dir": "[..]", - "reason": "build-script-executed" - } - "#, - ) - .run(); -} - -#[cargo_test] -fn metabuild_failed_build_json() { - let p = basic_project(); - // Modify the metabuild dep so that it fails to compile. 
- p.change_file("mb/src/lib.rs", ""); - p.cargo("build --message-format=json") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_json_contains_unordered( - r#" - { - "message": { - "children": "{...}", - "code": "{...}", - "level": "error", - "message": "cannot find function `metabuild` in [..] `mb`", - "rendered": "{...}", - "spans": "{...}" - }, - "package_id": "foo [..]", - "manifest_path": "[..]", - "reason": "compiler-message", - "target": { - "crate_types": [ - "bin" - ], - "doc": false, - "doctest": false, - "edition": "2018", - "kind": [ - "custom-build" - ], - "name": "metabuild-foo", - "src_path": null, - "test": false - } - } - "#, - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/metadata.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/metadata.rs deleted file mode 100644 index bc39d86ed..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/metadata.rs +++ /dev/null @@ -1,3097 +0,0 @@ -//! Tests for the `cargo metadata` command. - -use cargo_test_support::install::cargo_home; -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, main_file, project, rustc_host}; -use serde_json::json; - -#[cargo_test] -fn cargo_metadata_simple() { - let p = project() - .file("src/foo.rs", "") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .build(); - - p.cargo("metadata") - .with_json( - r#" - { - "packages": [ - { - "authors": [ - "wycats@example.com" - ], - "categories": [], - "default_run": null, - "name": "foo", - "version": "0.5.0", - "id": "foo[..]", - "keywords": [], - "source": null, - "dependencies": [], - "edition": "2015", - "license": null, - "license_file": null, - "links": null, - "description": null, - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "homepage": null, - "documentation": null, - "targets": [ - { - "kind": [ - "bin" - ], - "crate_types": [ - "bin" - ], - "doc": true, - "doctest": false, - "test": true, - "edition": "2015", - "name": "foo", - "src_path": "[..]/foo/src/foo.rs" - } - ], - "features": {}, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null - } - ], - "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], - "resolve": { - "nodes": [ - { - "dependencies": [], - "deps": [], - "features": [], - "id": "foo 0.5.0 (path+file:[..]foo)" - } - ], - "root": "foo 0.5.0 (path+file:[..]foo)" - }, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); -} - -#[cargo_test] -fn cargo_metadata_warns_on_implicit_version() { - let p = project() - .file("src/foo.rs", "") - .file("Cargo.toml", &basic_bin_manifest("foo")) - .build(); - - p.cargo("metadata").with_stderr("[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems").run(); - - p.cargo("metadata --format-version 1").with_stderr("").run(); -} - -#[cargo_test] -fn library_with_several_crate_types() { - let p = project() - .file("src/lib.rs", "") - .file( - "Cargo.toml", - r#" -[package] -name = "foo" -version = "0.5.0" - -[lib] -crate-type = ["lib", "staticlib"] - "#, - ) - .build(); - - p.cargo("metadata") - .with_json( - r#" - { - "packages": [ - { - "authors": [], - "categories": [], - "default_run": null, - "name": "foo", - "readme": null, - "repository": null, - "homepage": null, - "documentation": null, - "version": "0.5.0", - 
"rust_version": null, - "id": "foo[..]", - "keywords": [], - "source": null, - "dependencies": [], - "edition": "2015", - "license": null, - "license_file": null, - "links": null, - "description": null, - "targets": [ - { - "kind": [ - "lib", - "staticlib" - ], - "crate_types": [ - "lib", - "staticlib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "name": "foo", - "src_path": "[..]/foo/src/lib.rs" - } - ], - "features": {}, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null - } - ], - "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], - "resolve": { - "nodes": [ - { - "dependencies": [], - "deps": [], - "features": [], - "id": "foo 0.5.0 (path+file:[..]foo)" - } - ], - "root": "foo 0.5.0 (path+file:[..]foo)" - }, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); -} - -#[cargo_test] -fn library_with_features() { - let p = project() - .file("src/lib.rs", "") - .file( - "Cargo.toml", - r#" -[package] -name = "foo" -version = "0.5.0" - -[features] -default = ["default_feat"] -default_feat = [] -optional_feat = [] - "#, - ) - .build(); - - p.cargo("metadata") - .with_json( - r#" - { - "packages": [ - { - "authors": [], - "categories": [], - "default_run": null, - "name": "foo", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "version": "0.5.0", - "id": "foo[..]", - "keywords": [], - "source": null, - "dependencies": [], - "edition": "2015", - "license": null, - "license_file": null, - "links": null, - "description": null, - "targets": [ - { - "kind": [ - "lib" - ], - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "name": "foo", - "src_path": "[..]/foo/src/lib.rs" - } - ], - "features": { - "default": [ - "default_feat" - ], - "default_feat": [], - "optional_feat": [] - }, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null - } - ], - "workspace_members": ["foo 0.5.0 (path+file:[..]foo)"], - "resolve": { - "nodes": [ - { - "dependencies": [], - "deps": [], - "features": [ - "default", - "default_feat" - ], - "id": "foo 0.5.0 (path+file:[..]foo)" - } - ], - "root": "foo 0.5.0 (path+file:[..]foo)" - }, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); -} - -#[cargo_test] -fn cargo_metadata_with_deps_and_version() { - let p = project() - .file("src/foo.rs", "") - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - license = "MIT" - description = "foo" - - [[bin]] - name = "foo" - - [dependencies] - bar = "*" - [dev-dependencies] - foobar = "*" - "#, - ) - .build(); - Package::new("baz", "0.0.1").publish(); - Package::new("foobar", "0.0.1").publish(); - Package::new("bar", "0.0.1").dep("baz", "0.0.1").publish(); - - p.cargo("metadata -q --format-version 1") - .with_json( - r#" - { - "packages": [ - { - "authors": [], - "categories": [], - "default_run": null, - "dependencies": [ - { - "features": [], - "kind": null, - "name": "baz", - "optional": false, - "registry": null, - "rename": null, - "req": "^0.0.1", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "target": null, - "uses_default_features": true - } - ], - "description": null, - "edition": "2015", - "features": {}, - "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "keywords": [], - "license": null, 
- "license_file": null, - "links": null, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null, - "name": "bar", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "bar", - "src_path": "[..]src/lib.rs" - } - ], - "version": "0.0.1" - }, - { - "authors": [], - "categories": [], - "default_run": null, - "dependencies": [], - "description": null, - "edition": "2015", - "features": {}, - "id": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null, - "name": "baz", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "baz", - "src_path": "[..]src/lib.rs" - } - ], - "version": "0.0.1" - }, - { - "authors": [], - "categories": [], - "default_run": null, - "dependencies": [ - { - "features": [], - "kind": null, - "name": "bar", - "optional": false, - "registry": null, - "rename": null, - "req": "*", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "target": null, - "uses_default_features": true - }, - { - "features": [], - "kind": "dev", - "name": "foobar", - "optional": false, - "registry": null, - "rename": null, - "req": "*", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "target": null, - "uses_default_features": true - } - ], - "description": "foo", - "edition": "2015", - "features": {}, - "id": "foo 0.5.0 (path+file:[..]foo)", - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null, - "name": "foo", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": null, - "targets": [ - { - "crate_types": [ - "bin" - ], - "doc": true, - "doctest": false, - "test": true, - "edition": "2015", - "kind": [ - "bin" - ], - "name": "foo", - "src_path": "[..]src/foo.rs" - } - ], - "version": "0.5.0" - }, - { - "authors": [], - "categories": [], - "default_run": null, - "dependencies": [], - "description": null, - "edition": "2015", - "features": {}, - "id": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null, - "name": "foobar", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "foobar", - "src_path": "[..]src/lib.rs" - } - ], - "version": "0.0.1" - } - ], - "resolve": { - "nodes": [ - { - "dependencies": [ - "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - ], - "deps": [ - { - "dep_kinds": [ - 
{ - "kind": null, - "target": null - } - ], - "name": "baz", - "pkg": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - } - ], - "features": [], - "id": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - }, - { - "dependencies": [], - "deps": [], - "features": [], - "id": "baz 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - }, - { - "dependencies": [ - "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - ], - "deps": [ - { - "dep_kinds": [ - { - "kind": null, - "target": null - } - ], - "name": "bar", - "pkg": "bar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - }, - { - "dep_kinds": [ - { - "kind": "dev", - "target": null - } - ], - "name": "foobar", - "pkg": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - } - ], - "features": [], - "id": "foo 0.5.0 (path+file:[..]foo)" - }, - { - "dependencies": [], - "deps": [], - "features": [], - "id": "foobar 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - } - ], - "root": "foo 0.5.0 (path+file:[..]foo)" - }, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_members": [ - "foo 0.5.0 (path+file:[..]foo)" - ], - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); -} - -#[cargo_test] -fn example() { - let p = project() - .file("src/lib.rs", "") - .file("examples/ex.rs", "") - .file( - "Cargo.toml", - r#" -[package] -name = "foo" -version = "0.1.0" - -[[example]] -name = "ex" - "#, - ) - .build(); - - p.cargo("metadata") - .with_json( - r#" - { - "packages": [ - { - "authors": [], - "categories": [], - "default_run": null, - "name": "foo", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "version": "0.1.0", - "id": "foo[..]", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "description": null, - "edition": "2015", - "source": null, - "dependencies": [], - "targets": [ - { - "kind": [ "lib" ], - "crate_types": [ "lib" ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "name": "foo", - "src_path": "[..]/foo/src/lib.rs" - }, - { - "kind": [ "example" ], - "crate_types": [ "bin" ], - "doc": false, - "doctest": false, - "test": false, - "edition": "2015", - "name": "ex", - "src_path": "[..]/foo/examples/ex.rs" - } - ], - "features": {}, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null - } - ], - "workspace_members": [ - "foo 0.1.0 (path+file:[..]foo)" - ], - "resolve": { - "root": "foo 0.1.0 (path+file://[..]foo)", - "nodes": [ - { - "id": "foo 0.1.0 (path+file:[..]foo)", - "features": [], - "dependencies": [], - "deps": [] - } - ] - }, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); -} - -#[cargo_test] -fn example_lib() { - let p = project() - .file("src/lib.rs", "") - .file("examples/ex.rs", "") - .file( - "Cargo.toml", - r#" -[package] -name = "foo" -version = "0.1.0" - -[[example]] -name = "ex" -crate-type = ["rlib", "dylib"] - "#, - ) - .build(); - - p.cargo("metadata") - .with_json( - r#" - { - "packages": [ - { - "authors": [], - "categories": [], - "default_run": null, - "name": "foo", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "version": "0.1.0", - "id": "foo[..]", - "keywords": [], - "license": null, - "license_file": null, 
- "links": null, - "description": null, - "edition": "2015", - "source": null, - "dependencies": [], - "targets": [ - { - "kind": [ "lib" ], - "crate_types": [ "lib" ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "name": "foo", - "src_path": "[..]/foo/src/lib.rs" - }, - { - "kind": [ "example" ], - "crate_types": [ "rlib", "dylib" ], - "doc": false, - "doctest": false, - "test": false, - "edition": "2015", - "name": "ex", - "src_path": "[..]/foo/examples/ex.rs" - } - ], - "features": {}, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null - } - ], - "workspace_members": [ - "foo 0.1.0 (path+file:[..]foo)" - ], - "resolve": { - "root": "foo 0.1.0 (path+file://[..]foo)", - "nodes": [ - { - "id": "foo 0.1.0 (path+file:[..]foo)", - "features": [], - "dependencies": [], - "deps": [] - } - ] - }, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); -} - -#[cargo_test] -fn workspace_metadata() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - - [workspace.metadata] - tool1 = "hello" - tool2 = [1, 2, 3] - - [workspace.metadata.foo] - bar = 3 - - "#, - ) - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .file("baz/Cargo.toml", &basic_lib_manifest("baz")) - .file("baz/src/lib.rs", "") - .build(); - - p.cargo("metadata") - .with_json( - r#" - { - "packages": [ - { - "authors": [ - "wycats@example.com" - ], - "categories": [], - "default_run": null, - "name": "bar", - "version": "0.5.0", - "id": "bar[..]", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "keywords": [], - "source": null, - "dependencies": [], - "license": null, - "license_file": null, - "links": null, - "description": null, - "edition": "2015", - "targets": [ - { - "kind": [ "lib" ], - "crate_types": [ "lib" ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "name": "bar", - "src_path": "[..]bar/src/lib.rs" - } - ], - "features": {}, - "manifest_path": "[..]bar/Cargo.toml", - "metadata": null, - "publish": null - }, - { - "authors": [ - "wycats@example.com" - ], - "categories": [], - "default_run": null, - "name": "baz", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "version": "0.5.0", - "id": "baz[..]", - "keywords": [], - "source": null, - "dependencies": [], - "license": null, - "license_file": null, - "links": null, - "description": null, - "edition": "2015", - "targets": [ - { - "kind": [ "lib" ], - "crate_types": [ "lib" ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "name": "baz", - "src_path": "[..]baz/src/lib.rs" - } - ], - "features": {}, - "manifest_path": "[..]baz/Cargo.toml", - "metadata": null, - "publish": null - } - ], - "workspace_members": ["bar 0.5.0 (path+file:[..]bar)", "baz 0.5.0 (path+file:[..]baz)"], - "resolve": { - "nodes": [ - { - "dependencies": [], - "deps": [], - "features": [], - "id": "bar 0.5.0 (path+file:[..]bar)" - }, - { - "dependencies": [], - "deps": [], - "features": [], - "id": "baz 0.5.0 (path+file:[..]baz)" - } - ], - "root": null - }, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": { - "tool1": "hello", - "tool2": [1, 2, 3], - "foo": { - "bar": 3 - } - } - }"#, - ) - .run(); -} - -#[cargo_test] -fn workspace_metadata_no_deps() { - let p = project() - .file( 
- "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .file("baz/Cargo.toml", &basic_lib_manifest("baz")) - .file("baz/src/lib.rs", "") - .build(); - - p.cargo("metadata --no-deps") - .with_json( - r#" - { - "packages": [ - { - "authors": [ - "wycats@example.com" - ], - "categories": [], - "default_run": null, - "name": "bar", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "version": "0.5.0", - "id": "bar[..]", - "keywords": [], - "source": null, - "dependencies": [], - "license": null, - "license_file": null, - "links": null, - "description": null, - "edition": "2015", - "targets": [ - { - "kind": [ "lib" ], - "crate_types": [ "lib" ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "name": "bar", - "src_path": "[..]bar/src/lib.rs" - } - ], - "features": {}, - "manifest_path": "[..]bar/Cargo.toml", - "metadata": null, - "publish": null - }, - { - "authors": [ - "wycats@example.com" - ], - "categories": [], - "default_run": null, - "name": "baz", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "version": "0.5.0", - "id": "baz[..]", - "keywords": [], - "source": null, - "dependencies": [], - "license": null, - "license_file": null, - "links": null, - "description": null, - "edition": "2015", - "targets": [ - { - "kind": [ "lib" ], - "crate_types": ["lib"], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "name": "baz", - "src_path": "[..]baz/src/lib.rs" - } - ], - "features": {}, - "manifest_path": "[..]baz/Cargo.toml", - "metadata": null, - "publish": null - } - ], - "workspace_members": ["bar 0.5.0 (path+file:[..]bar)", "baz 0.5.0 (path+file:[..]baz)"], - "resolve": null, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); -} - -#[cargo_test] -fn cargo_metadata_with_invalid_manifest() { - let p = project().file("Cargo.toml", "").build(); - - p.cargo("metadata --format-version 1") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - virtual manifests must be configured with [workspace]", - ) - .run(); -} - -const MANIFEST_OUTPUT: &str = r#" -{ - "packages": [{ - "authors": [ - "wycats@example.com" - ], - "categories": [], - "default_run": null, - "name":"foo", - "version":"0.5.0", - "id":"foo[..]0.5.0[..](path+file://[..]/foo)", - "source":null, - "dependencies":[], - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "description": null, - "edition": "2015", - "targets":[{ - "kind":["bin"], - "crate_types":["bin"], - "doc": true, - "doctest": false, - "test": true, - "edition": "2015", - "name":"foo", - "src_path":"[..]/foo/src/foo.rs" - }], - "features":{}, - "manifest_path":"[..]Cargo.toml", - "metadata": null, - "publish": null, - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null - }], - "workspace_members": [ "foo 0.5.0 (path+file:[..]foo)" ], - "resolve": null, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null -}"#; - -#[cargo_test] -fn cargo_metadata_no_deps_path_to_cargo_toml_relative() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("metadata 
--no-deps --manifest-path foo/Cargo.toml") - .cwd(p.root().parent().unwrap()) - .with_json(MANIFEST_OUTPUT) - .run(); -} - -#[cargo_test] -fn cargo_metadata_no_deps_path_to_cargo_toml_absolute() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("metadata --no-deps --manifest-path") - .arg(p.root().join("Cargo.toml")) - .cwd(p.root().parent().unwrap()) - .with_json(MANIFEST_OUTPUT) - .run(); -} - -#[cargo_test] -fn cargo_metadata_no_deps_path_to_cargo_toml_parent_relative() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("metadata --no-deps --manifest-path foo") - .cwd(p.root().parent().unwrap()) - .with_status(101) - .with_stderr( - "[ERROR] the manifest-path must be \ - a path to a Cargo.toml file", - ) - .run(); -} - -#[cargo_test] -fn cargo_metadata_no_deps_path_to_cargo_toml_parent_absolute() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("metadata --no-deps --manifest-path") - .arg(p.root()) - .cwd(p.root().parent().unwrap()) - .with_status(101) - .with_stderr( - "[ERROR] the manifest-path must be \ - a path to a Cargo.toml file", - ) - .run(); -} - -#[cargo_test] -fn cargo_metadata_no_deps_cwd() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("metadata --no-deps") - .with_json(MANIFEST_OUTPUT) - .run(); -} - -#[cargo_test] -fn cargo_metadata_bad_version() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("metadata --no-deps --format-version 2") - .with_status(1) - .with_stderr_contains( - "\ -error: '2' isn't a valid value for '--format-version ' -[possible values: 1] -", - ) - .run(); -} - -#[cargo_test] -fn multiple_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [features] - a = [] - b = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("metadata --features").arg("a b").run(); -} - -#[cargo_test] -fn package_metadata() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = ["wycats@example.com"] - categories = ["database"] - keywords = ["database"] - readme = "README.md" - repository = "https://github.com/rust-lang/cargo" - homepage = "https://rust-lang.org" - documentation = "https://doc.rust-lang.org/stable/std/" - - [package.metadata.bar] - baz = "quux" - "#, - ) - .file("README.md", "") - .file("src/lib.rs", "") - .build(); - - p.cargo("metadata --no-deps") - .with_json( - r#" - { - "packages": [ - { - "authors": ["wycats@example.com"], - "categories": ["database"], - "default_run": null, - "name": "foo", - "readme": "README.md", - "repository": "https://github.com/rust-lang/cargo", - "rust_version": null, - "homepage": "https://rust-lang.org", - "documentation": "https://doc.rust-lang.org/stable/std/", - "version": "0.1.0", - "id": "foo[..]", - "keywords": ["database"], - "source": null, - "dependencies": [], - "edition": "2015", - "license": null, - "license_file": null, - "links": null, - "description": null, - "targets": [ - { - "kind": [ "lib" ], - "crate_types": [ "lib" ], - "doc": true, - "doctest": true, - 
"test": true, - "edition": "2015", - "name": "foo", - "src_path": "[..]foo/src/lib.rs" - } - ], - "features": {}, - "manifest_path": "[..]foo/Cargo.toml", - "metadata": { - "bar": { - "baz": "quux" - } - }, - "publish": null - } - ], - "workspace_members": ["foo[..]"], - "resolve": null, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); -} - -#[cargo_test] -fn package_publish() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = ["wycats@example.com"] - categories = ["database"] - keywords = ["database"] - readme = "README.md" - repository = "https://github.com/rust-lang/cargo" - publish = ["my-registry"] - "#, - ) - .file("README.md", "") - .file("src/lib.rs", "") - .build(); - - p.cargo("metadata --no-deps") - .with_json( - r#" - { - "packages": [ - { - "authors": ["wycats@example.com"], - "categories": ["database"], - "default_run": null, - "name": "foo", - "readme": "README.md", - "repository": "https://github.com/rust-lang/cargo", - "rust_version": null, - "homepage": null, - "documentation": null, - "version": "0.1.0", - "id": "foo[..]", - "keywords": ["database"], - "source": null, - "dependencies": [], - "edition": "2015", - "license": null, - "license_file": null, - "links": null, - "description": null, - "targets": [ - { - "kind": [ "lib" ], - "crate_types": [ "lib" ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "name": "foo", - "src_path": "[..]foo/src/lib.rs" - } - ], - "features": {}, - "manifest_path": "[..]foo/Cargo.toml", - "metadata": null, - "publish": ["my-registry"] - } - ], - "workspace_members": ["foo[..]"], - "resolve": null, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null - }"#, - ) - .run(); -} - -#[cargo_test] -fn cargo_metadata_path_to_cargo_toml_project() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("package --manifest-path") - .arg(p.root().join("bar/Cargo.toml")) - .cwd(p.root().parent().unwrap()) - .run(); - - p.cargo("metadata --manifest-path") - .arg(p.root().join("target/package/bar-0.5.0/Cargo.toml")) - .with_json( - r#" - { - "packages": [ - { - "authors": [ - "wycats@example.com" - ], - "categories": [], - "default_run": null, - "dependencies": [], - "description": null, - "edition": "2015", - "features": {}, - "id": "bar 0.5.0 ([..])", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null, - "name": "bar", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": null, - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "bar", - "src_path": "[..]src/lib.rs" - } - ], - "version": "0.5.0" - } - ], - "resolve": { - "nodes": [ - { - "dependencies": [], - "deps": [], - "features": [], - "id": "bar 0.5.0 ([..])" - } - ], - "root": "bar 0.5.0 (path+file:[..])" - }, - "target_directory": "[..]", - "version": 1, - "workspace_members": [ - "bar 0.5.0 (path+file:[..])" - ], - "workspace_root": "[..]", - "metadata": null - } - "#, - ) - .run(); -} - -#[cargo_test] -fn package_edition_2018() { - let p = project() - 
.file("src/lib.rs", "") - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = ["wycats@example.com"] - edition = "2018" - "#, - ) - .build(); - p.cargo("metadata") - .with_json( - r#" - { - "packages": [ - { - "authors": [ - "wycats@example.com" - ], - "categories": [], - "default_run": null, - "dependencies": [], - "description": null, - "edition": "2018", - "features": {}, - "id": "foo 0.1.0 (path+file:[..])", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null, - "name": "foo", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": null, - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2018", - "kind": [ - "lib" - ], - "name": "foo", - "src_path": "[..]src/lib.rs" - } - ], - "version": "0.1.0" - } - ], - "resolve": { - "nodes": [ - { - "dependencies": [], - "deps": [], - "features": [], - "id": "foo 0.1.0 (path+file:[..])" - } - ], - "root": "foo 0.1.0 (path+file:[..])" - }, - "target_directory": "[..]", - "version": 1, - "workspace_members": [ - "foo 0.1.0 (path+file:[..])" - ], - "workspace_root": "[..]", - "metadata": null - } - "#, - ) - .run(); -} - -#[cargo_test] -fn package_default_run() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#) - .file("src/bin/b.rs", r#"fn main() { println!("hello B"); }"#) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = ["wycats@example.com"] - edition = "2018" - default-run = "a" - "#, - ) - .build(); - let json = p.cargo("metadata").run_json(); - assert_eq!(json["packages"][0]["default_run"], json!("a")); -} - -#[cargo_test] -fn package_rust_version() { - let p = project() - .file("src/lib.rs", "") - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = ["wycats@example.com"] - edition = "2018" - rust-version = "1.56" - "#, - ) - .build(); - let json = p.cargo("metadata").run_json(); - assert_eq!(json["packages"][0]["rust_version"], json!("1.56")); -} - -#[cargo_test] -fn target_edition_2018() { - let p = project() - .file("src/lib.rs", "") - .file("src/main.rs", "") - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = ["wycats@example.com"] - edition = "2015" - - [lib] - edition = "2018" - "#, - ) - .build(); - p.cargo("metadata") - .with_json( - r#" - { - "packages": [ - { - "authors": [ - "wycats@example.com" - ], - "categories": [], - "default_run": null, - "dependencies": [], - "description": null, - "edition": "2015", - "features": {}, - "id": "foo 0.1.0 (path+file:[..])", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "manifest_path": "[..]Cargo.toml", - "metadata": null, - "publish": null, - "name": "foo", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": null, - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2018", - "kind": [ - "lib" - ], - "name": "foo", - "src_path": "[..]src/lib.rs" - }, - { - "crate_types": [ - "bin" - ], - "doc": true, - "doctest": false, - "test": true, - "edition": "2015", - "kind": [ - "bin" - ], - "name": "foo", - "src_path": "[..]src/main.rs" - } - ], - "version": "0.1.0" - } - ], - "resolve": { - "nodes": [ - 
{ - "dependencies": [], - "deps": [], - "features": [], - "id": "foo 0.1.0 (path+file:[..])" - } - ], - "root": "foo 0.1.0 (path+file:[..])" - }, - "target_directory": "[..]", - "version": 1, - "workspace_members": [ - "foo 0.1.0 (path+file:[..])" - ], - "workspace_root": "[..]", - "metadata": null - } - "#, - ) - .run(); -} - -#[cargo_test] -fn rename_dependency() { - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.2.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { version = "0.1.0" } - baz = { version = "0.2.0", package = "bar" } - "#, - ) - .file("src/lib.rs", "extern crate bar; extern crate baz;") - .build(); - - p.cargo("metadata") - .with_json( - r#" -{ - "packages": [ - { - "authors": [], - "categories": [], - "default_run": null, - "dependencies": [], - "description": null, - "edition": "2015", - "features": {}, - "id": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "manifest_path": "[..]", - "metadata": null, - "publish": null, - "name": "bar", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "bar", - "src_path": "[..]" - } - ], - "version": "0.1.0" - }, - { - "authors": [], - "categories": [], - "default_run": null, - "dependencies": [], - "description": null, - "edition": "2015", - "features": {}, - "id": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "manifest_path": "[..]", - "metadata": null, - "publish": null, - "name": "bar", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "bar", - "src_path": "[..]" - } - ], - "version": "0.2.0" - }, - { - "authors": [], - "categories": [], - "default_run": null, - "dependencies": [ - { - "features": [], - "kind": null, - "name": "bar", - "optional": false, - "rename": null, - "registry": null, - "req": "^0.1.0", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "target": null, - "uses_default_features": true - }, - { - "features": [], - "kind": null, - "name": "bar", - "optional": false, - "rename": "baz", - "registry": null, - "req": "^0.2.0", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "target": null, - "uses_default_features": true - } - ], - "description": null, - "edition": "2015", - "features": {}, - "id": "foo 0.0.1[..]", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "manifest_path": "[..]", - "metadata": null, - "publish": null, - "name": "foo", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": null, - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "foo", - "src_path": "[..]" - } - ], - 
"version": "0.0.1" - } - ], - "resolve": { - "nodes": [ - { - "dependencies": [], - "deps": [], - "features": [], - "id": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" - }, - { - "dependencies": [], - "deps": [], - "features": [], - "id": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" - }, - { - "dependencies": [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" - ], - "deps": [ - { - "dep_kinds": [ - { - "kind": null, - "target": null - } - ], - "name": "bar", - "pkg": "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" - }, - { - "dep_kinds": [ - { - "kind": null, - "target": null - } - ], - "name": "baz", - "pkg": "bar 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" - } - ], - "features": [], - "id": "foo 0.0.1[..]" - } - ], - "root": "foo 0.0.1[..]" - }, - "target_directory": "[..]", - "version": 1, - "workspace_members": [ - "foo 0.0.1[..]" - ], - "workspace_root": "[..]", - "metadata": null -}"#, - ) - .run(); -} - -#[cargo_test] -fn metadata_links() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - links = "a" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - - p.cargo("metadata") - .with_json( - r#" - { - "packages": [ - { - "authors": [], - "categories": [], - "default_run": null, - "dependencies": [], - "description": null, - "edition": "2015", - "features": {}, - "id": "foo 0.5.0 [..]", - "keywords": [], - "license": null, - "license_file": null, - "links": "a", - "manifest_path": "[..]/foo/Cargo.toml", - "metadata": null, - "publish": null, - "name": "foo", - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "source": null, - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "foo", - "src_path": "[..]/foo/src/lib.rs" - }, - { - "crate_types": [ - "bin" - ], - "doc": false, - "doctest": false, - "test": false, - "edition": "2015", - "kind": [ - "custom-build" - ], - "name": "build-script-build", - "src_path": "[..]/foo/build.rs" - } - ], - "version": "0.5.0" - } - ], - "resolve": { - "nodes": [ - { - "dependencies": [], - "deps": [], - "features": [], - "id": "foo 0.5.0 [..]" - } - ], - "root": "foo 0.5.0 [..]" - }, - "target_directory": "[..]/foo/target", - "version": 1, - "workspace_members": [ - "foo 0.5.0 [..]" - ], - "workspace_root": "[..]/foo", - "metadata": null - } - "#, - ) - .run() -} - -#[cargo_test] -fn deps_with_bin_only() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [dependencies] - bdep = { path = "bdep" } - "#, - ) - .file("src/lib.rs", "") - .file("bdep/Cargo.toml", &basic_bin_manifest("bdep")) - .file("bdep/src/main.rs", "fn main() {}") - .build(); - - p.cargo("metadata") - .with_json( - r#" - { - "packages": [ - { - "name": "foo", - "version": "0.1.0", - "id": "foo 0.1.0 ([..])", - "license": null, - "license_file": null, - "description": null, - "source": null, - "dependencies": [ - { - "name": "bdep", - "source": null, - "req": "*", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "path": "[..]/foo/bdep", - "features": [], - "target": null, - "registry": null - } - ], - "targets": [ - { - "kind": [ - "lib" - ], - "crate_types": [ - "lib" - ], - "name": 
"foo", - "src_path": "[..]/foo/src/lib.rs", - "edition": "2015", - "doc": true, - "doctest": true, - "test": true - } - ], - "features": {}, - "manifest_path": "[..]/foo/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - } - ], - "workspace_members": [ - "foo 0.1.0 ([..])" - ], - "resolve": { - "nodes": [ - { - "id": "foo 0.1.0 ([..])", - "dependencies": [], - "deps": [], - "features": [] - } - ], - "root": "foo 0.1.0 ([..])" - }, - "target_directory": "[..]/foo/target", - "version": 1, - "workspace_root": "[..]foo", - "metadata": null - } - "#, - ) - .run(); -} - -#[cargo_test] -fn filter_platform() { - // Testing the --filter-platform flag. - Package::new("normal-dep", "0.0.1").publish(); - Package::new("host-dep", "0.0.1").publish(); - Package::new("alt-dep", "0.0.1").publish(); - Package::new("cfg-dep", "0.0.1").publish(); - // Just needs to be a valid target that is different from host. - // Presumably nobody runs these tests on wasm. ๐Ÿ™ƒ - let alt_target = "wasm32-unknown-unknown"; - let host_target = rustc_host(); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - normal-dep = "0.0.1" - - [target.{}.dependencies] - host-dep = "0.0.1" - - [target.{}.dependencies] - alt-dep = "0.0.1" - - [target.'cfg(foobar)'.dependencies] - cfg-dep = "0.0.1" - "#, - host_target, alt_target - ), - ) - .file("src/lib.rs", "") - .build(); - - let alt_dep = r#" - { - "name": "alt-dep", - "version": "0.0.1", - "id": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "license": null, - "license_file": null, - "description": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "dependencies": [], - "targets": [ - { - "kind": [ - "lib" - ], - "crate_types": [ - "lib" - ], - "name": "alt-dep", - "src_path": "[..]/alt-dep-0.0.1/src/lib.rs", - "edition": "2015", - "test": true, - "doc": true, - "doctest": true - } - ], - "features": {}, - "manifest_path": "[..]/alt-dep-0.0.1/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - } - "#; - - let cfg_dep = r#" - { - "name": "cfg-dep", - "version": "0.0.1", - "id": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "license": null, - "license_file": null, - "description": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "dependencies": [], - "targets": [ - { - "kind": [ - "lib" - ], - "crate_types": [ - "lib" - ], - "name": "cfg-dep", - "src_path": "[..]/cfg-dep-0.0.1/src/lib.rs", - "edition": "2015", - "test": true, - "doc": true, - "doctest": true - } - ], - "features": {}, - "manifest_path": "[..]/cfg-dep-0.0.1/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - } - "#; - - let host_dep = r#" - { - "name": "host-dep", - "version": "0.0.1", - "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", 
- "license": null, - "license_file": null, - "description": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "dependencies": [], - "targets": [ - { - "kind": [ - "lib" - ], - "crate_types": [ - "lib" - ], - "name": "host-dep", - "src_path": "[..]/host-dep-0.0.1/src/lib.rs", - "edition": "2015", - "test": true, - "doc": true, - "doctest": true - } - ], - "features": {}, - "manifest_path": "[..]/host-dep-0.0.1/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - } - "#; - - let normal_dep = r#" - { - "name": "normal-dep", - "version": "0.0.1", - "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "license": null, - "license_file": null, - "description": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "dependencies": [], - "targets": [ - { - "kind": [ - "lib" - ], - "crate_types": [ - "lib" - ], - "name": "normal-dep", - "src_path": "[..]/normal-dep-0.0.1/src/lib.rs", - "edition": "2015", - "test": true, - "doc": true, - "doctest": true - } - ], - "features": {}, - "manifest_path": "[..]/normal-dep-0.0.1/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - } - "#; - - // The dependencies are stored in sorted order by target and then by name. - // Since the testsuite may run on different targets, this needs to be - // sorted before it can be compared. - let mut foo_deps = serde_json::json!([ - { - "name": "normal-dep", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": null, - "registry": null - }, - { - "name": "cfg-dep", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": "cfg(foobar)", - "registry": null - }, - { - "name": "alt-dep", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": alt_target, - "registry": null - }, - { - "name": "host-dep", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "req": "^0.0.1", - "kind": null, - "rename": null, - "optional": false, - "uses_default_features": true, - "features": [], - "target": host_target, - "registry": null - } - ]); - foo_deps.as_array_mut().unwrap().sort_by(|a, b| { - // This really should be `rename`, but not needed here. - // Also, sorting on `name` isn't really necessary since this test - // only has one package per target, but leaving it here to be safe. 
- let a = (a["target"].as_str(), a["name"].as_str()); - let b = (b["target"].as_str(), b["name"].as_str()); - a.cmp(&b) - }); - - let foo = r#" - { - "name": "foo", - "version": "0.1.0", - "id": "foo 0.1.0 (path+file:[..]foo)", - "license": null, - "license_file": null, - "description": null, - "source": null, - "dependencies": - $FOO_DEPS, - "targets": [ - { - "kind": [ - "lib" - ], - "crate_types": [ - "lib" - ], - "name": "foo", - "src_path": "[..]/foo/src/lib.rs", - "edition": "2015", - "test": true, - "doc": true, - "doctest": true - } - ], - "features": {}, - "manifest_path": "[..]/foo/Cargo.toml", - "metadata": null, - "publish": null, - "authors": [], - "categories": [], - "default_run": null, - "keywords": [], - "readme": null, - "repository": null, - "rust_version": null, - "homepage": null, - "documentation": null, - "edition": "2015", - "links": null - } - "# - .replace("$ALT_TRIPLE", alt_target) - .replace("$HOST_TRIPLE", host_target) - .replace("$FOO_DEPS", &foo_deps.to_string()); - - // We're going to be checking that we don't download excessively, - // so we need to ensure that downloads will happen. - let clear = || { - cargo_home().join("registry/cache").rm_rf(); - cargo_home().join("registry/src").rm_rf(); - p.build_dir().rm_rf(); - }; - - // Normal metadata, no filtering, returns *everything*. - p.cargo("metadata") - .with_stderr_unordered( - "\ -[UPDATING] [..] -[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems -[DOWNLOADING] crates ... -[DOWNLOADED] normal-dep v0.0.1 [..] -[DOWNLOADED] host-dep v0.0.1 [..] -[DOWNLOADED] alt-dep v0.0.1 [..] -[DOWNLOADED] cfg-dep v0.0.1 [..] -", - ) - .with_json( - &r#" -{ - "packages": [ - $ALT_DEP, - $CFG_DEP, - $FOO, - $HOST_DEP, - $NORMAL_DEP - ], - "workspace_members": [ - "foo 0.1.0 (path+file:[..]foo)" - ], - "resolve": { - "nodes": [ - { - "id": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - "deps": [], - "features": [] - }, - { - "id": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - "deps": [], - "features": [] - }, - { - "id": "foo 0.1.0 (path+file:[..]foo)", - "dependencies": [ - "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - ], - "deps": [ - { - "name": "alt_dep", - "pkg": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": "$ALT_TRIPLE" - } - ] - }, - { - "name": "cfg_dep", - "pkg": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": "cfg(foobar)" - } - ] - }, - { - "name": "host_dep", - "pkg": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": "$HOST_TRIPLE" - } - ] - }, - { - "name": "normal_dep", - "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": null - } - ] - } - ], - "features": [] - }, - { - "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - "deps": [], - "features": [] - }, - { - "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - 
"deps": [], - "features": [] - } - ], - "root": "foo 0.1.0 (path+file:[..]foo)" - }, - "target_directory": "[..]/foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null -} -"# - .replace("$ALT_TRIPLE", alt_target) - .replace("$HOST_TRIPLE", host_target) - .replace("$ALT_DEP", alt_dep) - .replace("$CFG_DEP", cfg_dep) - .replace("$HOST_DEP", host_dep) - .replace("$NORMAL_DEP", normal_dep) - .replace("$FOO", &foo), - ) - .run(); - clear(); - - // Filter on alternate, removes cfg and host. - p.cargo("metadata --filter-platform") - .arg(alt_target) - .with_stderr_unordered( - "\ -[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems -[DOWNLOADING] crates ... -[DOWNLOADED] normal-dep v0.0.1 [..] -[DOWNLOADED] host-dep v0.0.1 [..] -[DOWNLOADED] alt-dep v0.0.1 [..] -", - ) - .with_json( - &r#" -{ - "packages": [ - $ALT_DEP, - $FOO, - $NORMAL_DEP - ], - "workspace_members": "{...}", - "resolve": { - "nodes": [ - { - "id": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - "deps": [], - "features": [] - }, - { - "id": "foo 0.1.0 (path+file:[..]foo)", - "dependencies": [ - "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - ], - "deps": [ - { - "name": "alt_dep", - "pkg": "alt-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": "$ALT_TRIPLE" - } - ] - }, - { - "name": "normal_dep", - "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": null - } - ] - } - ], - "features": [] - }, - { - "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - "deps": [], - "features": [] - } - ], - "root": "foo 0.1.0 (path+file:[..]foo)" - }, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]foo", - "metadata": null -} -"# - .replace("$ALT_TRIPLE", alt_target) - .replace("$ALT_DEP", alt_dep) - .replace("$NORMAL_DEP", normal_dep) - .replace("$FOO", &foo), - ) - .run(); - clear(); - - // Filter on host, removes alt and cfg. - p.cargo("metadata --filter-platform") - .arg(&host_target) - .with_stderr_unordered( - "\ -[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems -[DOWNLOADING] crates ... -[DOWNLOADED] normal-dep v0.0.1 [..] -[DOWNLOADED] host-dep v0.0.1 [..] 
-", - ) - .with_json( - &r#" -{ - "packages": [ - $FOO, - $HOST_DEP, - $NORMAL_DEP - ], - "workspace_members": "{...}", - "resolve": { - "nodes": [ - { - "id": "foo 0.1.0 (path+file:[..]foo)", - "dependencies": [ - "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - ], - "deps": [ - { - "name": "host_dep", - "pkg": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": "$HOST_TRIPLE" - } - ] - }, - { - "name": "normal_dep", - "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": null - } - ] - } - ], - "features": [] - }, - { - "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - "deps": [], - "features": [] - }, - { - "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - "deps": [], - "features": [] - } - ], - "root": "foo 0.1.0 (path+file:[..]foo)" - }, - "target_directory": "[..]foo/target", - "version": 1, - "workspace_root": "[..]foo", - "metadata": null -} -"# - .replace("$HOST_TRIPLE", host_target) - .replace("$HOST_DEP", host_dep) - .replace("$NORMAL_DEP", normal_dep) - .replace("$FOO", &foo), - ) - .run(); - clear(); - - // Filter host with cfg, removes alt only - p.cargo("metadata --filter-platform") - .arg(&host_target) - .env("RUSTFLAGS", "--cfg=foobar") - .with_stderr_unordered( - "\ -[WARNING] please specify `--format-version` flag explicitly to avoid compatibility problems -[DOWNLOADING] crates ... -[DOWNLOADED] normal-dep v0.0.1 [..] -[DOWNLOADED] host-dep v0.0.1 [..] -[DOWNLOADED] cfg-dep v0.0.1 [..] 
-", - ) - .with_json( - &r#" -{ - "packages": [ - $CFG_DEP, - $FOO, - $HOST_DEP, - $NORMAL_DEP - ], - "workspace_members": "{...}", - "resolve": { - "nodes": [ - { - "id": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - "deps": [], - "features": [] - }, - { - "id": "foo 0.1.0 (path+file:[..]/foo)", - "dependencies": [ - "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" - ], - "deps": [ - { - "name": "cfg_dep", - "pkg": "cfg-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": "cfg(foobar)" - } - ] - }, - { - "name": "host_dep", - "pkg": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": "$HOST_TRIPLE" - } - ] - }, - { - "name": "normal_dep", - "pkg": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dep_kinds": [ - { - "kind": null, - "target": null - } - ] - } - ], - "features": [] - }, - { - "id": "host-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - "deps": [], - "features": [] - }, - { - "id": "normal-dep 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "dependencies": [], - "deps": [], - "features": [] - } - ], - "root": "foo 0.1.0 (path+file:[..]/foo)" - }, - "target_directory": "[..]/foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null -} -"# - .replace("$HOST_TRIPLE", host_target) - .replace("$CFG_DEP", cfg_dep) - .replace("$HOST_DEP", host_dep) - .replace("$NORMAL_DEP", normal_dep) - .replace("$FOO", &foo), - ) - .run(); -} - -#[cargo_test] -fn dep_kinds() { - Package::new("bar", "0.1.0").publish(); - Package::new("winapi", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - - [dev-dependencies] - bar = "0.1" - - [build-dependencies] - bar = "0.1" - - [target.'cfg(windows)'.dependencies] - winapi = "0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("metadata") - .with_json( - r#" - { - "packages": "{...}", - "workspace_members": "{...}", - "target_directory": "{...}", - "version": 1, - "workspace_root": "{...}", - "metadata": null, - "resolve": { - "nodes": [ - { - "id": "bar 0.1.0 [..]", - "dependencies": [], - "deps": [], - "features": [] - }, - { - "id": "foo 0.1.0 [..]", - "dependencies": [ - "bar 0.1.0 [..]", - "winapi 0.1.0 [..]" - ], - "deps": [ - { - "name": "bar", - "pkg": "bar 0.1.0 [..]", - "dep_kinds": [ - { - "kind": null, - "target": null - }, - { - "kind": "dev", - "target": null - }, - { - "kind": "build", - "target": null - } - ] - }, - { - "name": "winapi", - "pkg": "winapi 0.1.0 [..]", - "dep_kinds": [ - { - "kind": null, - "target": "cfg(windows)" - } - ] - } - ], - "features": [] - }, - { - "id": "winapi 0.1.0 [..]", - "dependencies": [], - "deps": [], - "features": [] - } - ], - "root": "foo 0.1.0 [..]" - } - } - "#, - ) - .run(); -} - -#[cargo_test] -fn dep_kinds_workspace() { - // Check for bug with duplicate dep kinds in a workspace. - // If different members select different features for the same package, - // they show up multiple times in the resolver `deps`. 
- // - // Here: - // foo -> dep - // bar -> foo[feat1] -> dep - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [features] - feat1 = [] - - [dependencies] - dep = { path="dep" } - - [workspace] - members = ["bar"] - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [dependencies] - foo = { path="..", features=["feat1"] } - "#, - ) - .file("bar/src/lib.rs", "") - .file("dep/Cargo.toml", &basic_lib_manifest("dep")) - .file("dep/src/lib.rs", "") - .build(); - - p.cargo("metadata") - .with_json( - r#" - { - "packages": "{...}", - "workspace_members": "{...}", - "target_directory": "[..]/foo/target", - "version": 1, - "workspace_root": "[..]/foo", - "metadata": null, - "resolve": { - "nodes": [ - { - "id": "bar 0.1.0 (path+file://[..]/foo/bar)", - "dependencies": [ - "foo 0.1.0 (path+file://[..]/foo)" - ], - "deps": [ - { - "name": "foo", - "pkg": "foo 0.1.0 (path+file://[..]/foo)", - "dep_kinds": [ - { - "kind": null, - "target": null - } - ] - } - ], - "features": [] - }, - { - "id": "dep 0.5.0 (path+file://[..]/foo/dep)", - "dependencies": [], - "deps": [], - "features": [] - }, - { - "id": "foo 0.1.0 (path+file://[..]/foo)", - "dependencies": [ - "dep 0.5.0 (path+file://[..]/foo/dep)" - ], - "deps": [ - { - "name": "dep", - "pkg": "dep 0.5.0 (path+file://[..]/foo/dep)", - "dep_kinds": [ - { - "kind": null, - "target": null - } - ] - } - ], - "features": [ - "feat1" - ] - } - ], - "root": "foo 0.1.0 (path+file://[..]/foo)" - } - } - "#, - ) - .run(); -} - -// Creating non-utf8 path is an OS-specific pain, so let's run this only on -// linux, where arbitrary bytes work. -#[cfg(target_os = "linux")] -#[cargo_test] -fn cargo_metadata_non_utf8() { - use std::ffi::OsString; - use std::os::unix::ffi::OsStringExt; - use std::path::PathBuf; - - let base = PathBuf::from(OsString::from_vec(vec![255])); - - let p = project() - .no_manifest() - .file(base.join("./src/lib.rs"), "") - .file(base.join("./Cargo.toml"), &basic_lib_manifest("foo")) - .build(); - - p.cargo("metadata") - .cwd(p.root().join(base)) - .arg("--format-version") - .arg("1") - .with_stderr("error: path contains invalid UTF-8 characters") - .with_status(101) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/minimal_versions.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/minimal_versions.rs deleted file mode 100644 index 9febcc12d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/minimal_versions.rs +++ /dev/null @@ -1,38 +0,0 @@ -//! Tests for minimal-version resolution. -//! -//! Note: Some tests are located in the resolver-tests package. - -use cargo_test_support::project; -use cargo_test_support::registry::Package; - -// Ensure that the "-Z minimal-versions" CLI option works and the minimal -// version of a dependency ends up in the lock file. 
-#[cargo_test] -fn minimal_version_cli() { - Package::new("dep", "1.0.0").publish(); - Package::new("dep", "1.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.1" - - [dependencies] - dep = "1.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("generate-lockfile -Zminimal-versions") - .masquerade_as_nightly_cargo() - .run(); - - let lock = p.read_lockfile(); - - assert!(!lock.contains("1.1.0")); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/multitarget.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/multitarget.rs deleted file mode 100644 index afa8ea3c9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/multitarget.rs +++ /dev/null @@ -1,144 +0,0 @@ -//! Tests for multiple `--target` flags to subcommands - -use cargo_test_support::{basic_manifest, cross_compile, project, rustc_host}; - -#[cargo_test] -fn double_target_rejected() { - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --target a --target b") - .with_stderr("error: specifying multiple `--target` flags requires `-Zmultitarget`") - .with_status(101) - .run(); -} - -#[cargo_test] -fn simple_build() { - if cross_compile::disabled() { - return; - } - let t1 = cross_compile::alternate(); - let t2 = rustc_host(); - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -Z multitarget") - .arg("--target") - .arg(&t1) - .arg("--target") - .arg(&t2) - .masquerade_as_nightly_cargo() - .run(); - - assert!(p.target_bin(t1, "foo").is_file()); - assert!(p.target_bin(t2, "foo").is_file()); -} - -#[cargo_test] -fn simple_test() { - if !cross_compile::can_run_on_host() { - return; - } - let t1 = cross_compile::alternate(); - let t2 = rustc_host(); - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/lib.rs", "fn main() {}") - .build(); - - p.cargo("test -Z multitarget") - .arg("--target") - .arg(&t1) - .arg("--target") - .arg(&t2) - .masquerade_as_nightly_cargo() - .with_stderr_contains(&format!("[RUNNING] [..]{}[..]", t1)) - .with_stderr_contains(&format!("[RUNNING] [..]{}[..]", t2)) - .run(); -} - -#[cargo_test] -fn simple_run() { - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("run -Z multitarget --target a --target b") - .with_stderr("error: only one `--target` argument is supported") - .with_status(101) - .masquerade_as_nightly_cargo() - .run(); -} - -#[cargo_test] -fn simple_doc() { - if cross_compile::disabled() { - return; - } - let t1 = cross_compile::alternate(); - let t2 = rustc_host(); - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/lib.rs", "//! 
empty lib") - .build(); - - p.cargo("doc -Z multitarget") - .arg("--target") - .arg(&t1) - .arg("--target") - .arg(&t2) - .masquerade_as_nightly_cargo() - .run(); - - assert!(p.build_dir().join(&t1).join("doc/foo/index.html").is_file()); - assert!(p.build_dir().join(&t2).join("doc/foo/index.html").is_file()); -} - -#[cargo_test] -fn simple_check() { - if cross_compile::disabled() { - return; - } - let t1 = cross_compile::alternate(); - let t2 = rustc_host(); - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("check -Z multitarget") - .arg("--target") - .arg(&t1) - .arg("--target") - .arg(&t2) - .masquerade_as_nightly_cargo() - .run(); -} - -#[cargo_test] -fn same_value_twice() { - if cross_compile::disabled() { - return; - } - let t = rustc_host(); - let p = project() - .file("Cargo.toml", &basic_manifest("foo", "1.0.0")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -Z multitarget") - .arg("--target") - .arg(&t) - .arg("--target") - .arg(&t) - .masquerade_as_nightly_cargo() - .run(); - - assert!(p.target_bin(t, "foo").is_file()); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/net_config.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/net_config.rs deleted file mode 100644 index b145de89b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/net_config.rs +++ /dev/null @@ -1,74 +0,0 @@ -//! Tests for network configuration. - -use cargo_test_support::project; - -#[cargo_test] -fn net_retry_loads_from_config() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - git = "http://127.0.0.1:11/foo/bar" - "#, - ) - .file("src/main.rs", "") - .file( - ".cargo/config", - r#" - [net] - retry=1 - [http] - timeout=1 - "#, - ) - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr_contains( - "[WARNING] spurious network error \ - (1 tries remaining): [..]", - ) - .run(); -} - -#[cargo_test] -fn net_retry_git_outputs_warning() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - git = "http://127.0.0.1:11/foo/bar" - "#, - ) - .file( - ".cargo/config", - r#" - [http] - timeout=1 - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build -v -j 1") - .with_status(101) - .with_stderr_contains( - "[WARNING] spurious network error \ - (2 tries remaining): [..]", - ) - .with_stderr_contains("[WARNING] spurious network error (1 tries remaining): [..]") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/new.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/new.rs deleted file mode 100644 index 829638141..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/new.rs +++ /dev/null @@ -1,492 +0,0 @@ -//! Tests for the `cargo new` command. - -use cargo_test_support::cargo_process; -use cargo_test_support::paths; -use std::env; -use std::fs::{self, File}; - -fn create_empty_gitconfig() { - // This helps on Windows where libgit2 is very aggressive in attempting to - // find a git config file. 
- let gitconfig = paths::home().join(".gitconfig"); - File::create(gitconfig).unwrap(); -} - -#[cargo_test] -fn simple_lib() { - cargo_process("new --lib foo --vcs none --edition 2015") - .with_stderr("[CREATED] library `foo` package") - .run(); - - assert!(paths::root().join("foo").is_dir()); - assert!(paths::root().join("foo/Cargo.toml").is_file()); - assert!(paths::root().join("foo/src/lib.rs").is_file()); - assert!(!paths::root().join("foo/.gitignore").is_file()); - - let lib = paths::root().join("foo/src/lib.rs"); - let contents = fs::read_to_string(&lib).unwrap(); - assert_eq!( - contents, - r#"#[cfg(test)] -mod tests { - #[test] - fn it_works() { - let result = 2 + 2; - assert_eq!(result, 4); - } -} -"# - ); - - cargo_process("build").cwd(&paths::root().join("foo")).run(); -} - -#[cargo_test] -fn simple_bin() { - cargo_process("new --bin foo --edition 2015") - .with_stderr("[CREATED] binary (application) `foo` package") - .run(); - - assert!(paths::root().join("foo").is_dir()); - assert!(paths::root().join("foo/Cargo.toml").is_file()); - assert!(paths::root().join("foo/src/main.rs").is_file()); - - cargo_process("build").cwd(&paths::root().join("foo")).run(); - assert!(paths::root() - .join(&format!("foo/target/debug/foo{}", env::consts::EXE_SUFFIX)) - .is_file()); -} - -#[cargo_test] -fn both_lib_and_bin() { - cargo_process("new --lib --bin foo") - .with_status(101) - .with_stderr("[ERROR] can't specify both lib and binary outputs") - .run(); -} - -#[cargo_test] -fn simple_git() { - cargo_process("new --lib foo --edition 2015").run(); - - assert!(paths::root().is_dir()); - assert!(paths::root().join("foo/Cargo.toml").is_file()); - assert!(paths::root().join("foo/src/lib.rs").is_file()); - assert!(paths::root().join("foo/.git").is_dir()); - assert!(paths::root().join("foo/.gitignore").is_file()); - - let fp = paths::root().join("foo/.gitignore"); - let contents = fs::read_to_string(&fp).unwrap(); - assert_eq!(contents, "/target\nCargo.lock\n",); - - cargo_process("build").cwd(&paths::root().join("foo")).run(); -} - -#[cargo_test] -fn no_argument() { - cargo_process("new") - .with_status(1) - .with_stderr_contains( - "\ -error: The following required arguments were not provided: - -", - ) - .run(); -} - -#[cargo_test] -fn existing() { - let dst = paths::root().join("foo"); - fs::create_dir(&dst).unwrap(); - cargo_process("new foo") - .with_status(101) - .with_stderr( - "[ERROR] destination `[CWD]/foo` already exists\n\n\ - Use `cargo init` to initialize the directory", - ) - .run(); -} - -#[cargo_test] -fn invalid_characters() { - cargo_process("new foo.rs") - .with_status(101) - .with_stderr( - "\ -[ERROR] invalid character `.` in package name: `foo.rs`, [..] -If you need a package name to not match the directory name, consider using --name flag. -If you need a binary with the name \"foo.rs\", use a valid package name, \ -and set the binary name to be different from the package. \ -This can be done by setting the binary filename to `src/bin/foo.rs.rs` \ -or change the name in Cargo.toml with: - - [[bin]] - name = \"foo.rs\" - path = \"src/main.rs\" - -", - ) - .run(); -} - -#[cargo_test] -fn reserved_name() { - cargo_process("new test") - .with_status(101) - .with_stderr( - "\ -[ERROR] the name `test` cannot be used as a package name, it conflicts [..] -If you need a package name to not match the directory name, consider using --name flag. -If you need a binary with the name \"test\", use a valid package name, \ -and set the binary name to be different from the package. 
\ -This can be done by setting the binary filename to `src/bin/test.rs` \ -or change the name in Cargo.toml with: - - [[bin]] - name = \"test\" - path = \"src/main.rs\" - -", - ) - .run(); -} - -#[cargo_test] -fn reserved_binary_name() { - cargo_process("new --bin incremental") - .with_status(101) - .with_stderr( - "\ -[ERROR] the name `incremental` cannot be used as a package name, it conflicts [..] -If you need a package name to not match the directory name, consider using --name flag. -", - ) - .run(); - - cargo_process("new --lib incremental") - .with_stderr( - "\ -[WARNING] the name `incremental` will not support binary executables with that name, \ -it conflicts with cargo's build directory names -[CREATED] library `incremental` package -", - ) - .run(); -} - -#[cargo_test] -fn keyword_name() { - cargo_process("new pub") - .with_status(101) - .with_stderr( - "\ -[ERROR] the name `pub` cannot be used as a package name, it is a Rust keyword -If you need a package name to not match the directory name, consider using --name flag. -If you need a binary with the name \"pub\", use a valid package name, \ -and set the binary name to be different from the package. \ -This can be done by setting the binary filename to `src/bin/pub.rs` \ -or change the name in Cargo.toml with: - - [[bin]] - name = \"pub\" - path = \"src/main.rs\" - -", - ) - .run(); -} - -#[cargo_test] -fn std_name() { - cargo_process("new core") - .with_stderr( - "\ -[WARNING] the name `core` is part of Rust's standard library -It is recommended to use a different name to avoid problems. -If you need a package name to not match the directory name, consider using --name flag. -If you need a binary with the name \"core\", use a valid package name, \ -and set the binary name to be different from the package. 
\ -This can be done by setting the binary filename to `src/bin/core.rs` \ -or change the name in Cargo.toml with: - - [[bin]] - name = \"core\" - path = \"src/main.rs\" - -[CREATED] binary (application) `core` package -", - ) - .run(); -} - -#[cargo_test] -fn git_prefers_command_line() { - let root = paths::root(); - fs::create_dir(&root.join(".cargo")).unwrap(); - fs::write( - &root.join(".cargo/config"), - r#" - [cargo-new] - vcs = "none" - name = "foo" - email = "bar" - "#, - ) - .unwrap(); - - cargo_process("new foo --vcs git").run(); - assert!(paths::root().join("foo/.gitignore").exists()); - assert!(!fs::read_to_string(paths::root().join("foo/Cargo.toml")) - .unwrap() - .contains("authors =")); -} - -#[cargo_test] -fn subpackage_no_git() { - cargo_process("new foo").run(); - - assert!(paths::root().join("foo/.git").is_dir()); - assert!(paths::root().join("foo/.gitignore").is_file()); - - let subpackage = paths::root().join("foo").join("components"); - fs::create_dir(&subpackage).unwrap(); - cargo_process("new foo/components/subcomponent").run(); - - assert!(!paths::root() - .join("foo/components/subcomponent/.git") - .is_file()); - assert!(!paths::root() - .join("foo/components/subcomponent/.gitignore") - .is_file()); -} - -#[cargo_test] -fn subpackage_git_with_gitignore() { - cargo_process("new foo").run(); - - assert!(paths::root().join("foo/.git").is_dir()); - assert!(paths::root().join("foo/.gitignore").is_file()); - - let gitignore = paths::root().join("foo/.gitignore"); - fs::write(gitignore, b"components").unwrap(); - - let subpackage = paths::root().join("foo/components"); - fs::create_dir(&subpackage).unwrap(); - cargo_process("new foo/components/subcomponent").run(); - - assert!(paths::root() - .join("foo/components/subcomponent/.git") - .is_dir()); - assert!(paths::root() - .join("foo/components/subcomponent/.gitignore") - .is_file()); -} - -#[cargo_test] -fn subpackage_git_with_vcs_arg() { - cargo_process("new foo").run(); - - let subpackage = paths::root().join("foo").join("components"); - fs::create_dir(&subpackage).unwrap(); - cargo_process("new foo/components/subcomponent --vcs git").run(); - - assert!(paths::root() - .join("foo/components/subcomponent/.git") - .is_dir()); - assert!(paths::root() - .join("foo/components/subcomponent/.gitignore") - .is_file()); -} - -#[cargo_test] -fn unknown_flags() { - cargo_process("new foo --flag") - .with_status(1) - .with_stderr_contains( - "error: Found argument '--flag' which wasn't expected, or isn't valid in this context", - ) - .run(); -} - -#[cargo_test] -fn explicit_invalid_name_not_suggested() { - cargo_process("new --name 10-invalid a") - .with_status(101) - .with_stderr( - "\ -[ERROR] the name `10-invalid` cannot be used as a package name, \ -the name cannot start with a digit\n\ -If you need a binary with the name \"10-invalid\", use a valid package name, \ -and set the binary name to be different from the package. 
\ -This can be done by setting the binary filename to `src/bin/10-invalid.rs` \ -or change the name in Cargo.toml with: - - [[bin]] - name = \"10-invalid\" - path = \"src/main.rs\" - -", - ) - .run(); -} - -#[cargo_test] -fn explicit_project_name() { - cargo_process("new --lib foo --name bar") - .with_stderr("[CREATED] library `bar` package") - .run(); -} - -#[cargo_test] -fn new_with_edition_2015() { - cargo_process("new --edition 2015 foo").run(); - let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); - assert!(manifest.contains("edition = \"2015\"")); -} - -#[cargo_test] -fn new_with_edition_2018() { - cargo_process("new --edition 2018 foo").run(); - let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); - assert!(manifest.contains("edition = \"2018\"")); -} - -#[cargo_test] -fn new_default_edition() { - cargo_process("new foo").run(); - let manifest = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); - assert!(manifest.contains("edition = \"2021\"")); -} - -#[cargo_test] -fn new_with_bad_edition() { - cargo_process("new --edition something_else foo") - .with_stderr_contains("error: 'something_else' isn't a valid value[..]") - .with_status(1) - .run(); -} - -#[cargo_test] -fn new_with_reference_link() { - cargo_process("new foo").run(); - - let contents = fs::read_to_string(paths::root().join("foo/Cargo.toml")).unwrap(); - assert!(contents.contains("# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html")) -} - -#[cargo_test] -fn lockfile_constant_during_new() { - cargo_process("new foo").run(); - - cargo_process("build").cwd(&paths::root().join("foo")).run(); - let before = fs::read_to_string(paths::root().join("foo/Cargo.lock")).unwrap(); - cargo_process("build").cwd(&paths::root().join("foo")).run(); - let after = fs::read_to_string(paths::root().join("foo/Cargo.lock")).unwrap(); - assert_eq!(before, after); -} - -#[cargo_test] -fn restricted_windows_name() { - if cfg!(windows) { - cargo_process("new nul") - .with_status(101) - .with_stderr( - "\ -[ERROR] cannot use name `nul`, it is a reserved Windows filename -If you need a package name to not match the directory name, consider using --name flag. -", - ) - .run(); - } else { - cargo_process("new nul") - .with_stderr( - "\ -[WARNING] the name `nul` is a reserved Windows filename -This package will not work on Windows platforms. -[CREATED] binary (application) `nul` package -", - ) - .run(); - } -} - -#[cargo_test] -fn non_ascii_name() { - cargo_process("new ะŸั€ะธะฒะตั‚") - .with_stderr( - "\ -[WARNING] the name `ะŸั€ะธะฒะตั‚` contains non-ASCII characters -Support for non-ASCII crate names is experimental and only valid on the nightly toolchain. -[CREATED] binary (application) `ะŸั€ะธะฒะตั‚` package -", - ) - .run(); -} - -#[cargo_test] -fn non_ascii_name_invalid() { - // These are alphanumeric characters, but not Unicode XID. - cargo_process("new โ’ถโ’ทโ’ธ") - .with_status(101) - .with_stderr( - "\ -[ERROR] invalid character `โ’ถ` in package name: `โ’ถโ’ทโ’ธ`, \ -the first character must be a Unicode XID start character (most letters or `_`) -If you need a package name to not match the directory name, consider using --name flag. -If you need a binary with the name \"โ’ถโ’ทโ’ธ\", use a valid package name, \ -and set the binary name to be different from the package. 
\ -This can be done by setting the binary filename to `src/bin/โ’ถโ’ทโ’ธ.rs` \ -or change the name in Cargo.toml with: - - [[bin]] - name = \"โ’ถโ’ทโ’ธ\" - path = \"src/main.rs\" - -", - ) - .run(); - - cargo_process("new aยผ") - .with_status(101) - .with_stderr( - "\ -[ERROR] invalid character `ยผ` in package name: `aยผ`, \ -characters must be Unicode XID characters (numbers, `-`, `_`, or most letters) -If you need a package name to not match the directory name, consider using --name flag. -If you need a binary with the name \"aยผ\", use a valid package name, \ -and set the binary name to be different from the package. \ -This can be done by setting the binary filename to `src/bin/aยผ.rs` \ -or change the name in Cargo.toml with: - - [[bin]] - name = \"aยผ\" - path = \"src/main.rs\" - -", - ) - .run(); -} - -#[cargo_test] -fn git_default_branch() { - // Check for init.defaultBranch support. - create_empty_gitconfig(); - cargo_process("new foo").run(); - let repo = git2::Repository::open(paths::root().join("foo")).unwrap(); - let head = repo.find_reference("HEAD").unwrap(); - assert_eq!(head.symbolic_target().unwrap(), "refs/heads/master"); - - fs::write( - paths::home().join(".gitconfig"), - r#" - [init] - defaultBranch = hello - "#, - ) - .unwrap(); - cargo_process("new bar").run(); - let repo = git2::Repository::open(paths::root().join("bar")).unwrap(); - let head = repo.find_reference("HEAD").unwrap(); - assert_eq!(head.symbolic_target().unwrap(), "refs/heads/hello"); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/offline.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/offline.rs deleted file mode 100644 index 1d7c3952d..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/offline.rs +++ /dev/null @@ -1,702 +0,0 @@ -//! Tests for --offline flag. - -use cargo_test_support::{basic_manifest, git, main_file, path2url, project, registry::Package}; -use std::fs; - -#[cargo_test] -fn offline_unused_target_dep() { - // --offline with a target dependency that is not used and not downloaded. - Package::new("unused_dep", "1.0.0").publish(); - Package::new("used_dep", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - [dependencies] - used_dep = "1.0" - [target.'cfg(unused)'.dependencies] - unused_dep = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - // Do a build that downloads only what is necessary. - p.cargo("build") - .with_stderr_contains("[DOWNLOADED] used_dep [..]") - .with_stderr_does_not_contain("[DOWNLOADED] unused_dep [..]") - .run(); - p.cargo("clean").run(); - // Build offline, make sure it works. - p.cargo("build --offline").run(); -} - -#[cargo_test] -fn offline_missing_optional() { - Package::new("opt_dep", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - [dependencies] - opt_dep = { version = "1.0", optional = true } - "#, - ) - .file("src/lib.rs", "") - .build(); - // Do a build that downloads only what is necessary. - p.cargo("build") - .with_stderr_does_not_contain("[DOWNLOADED] opt_dep [..]") - .run(); - p.cargo("clean").run(); - // Build offline, make sure it works. 
- p.cargo("build --offline").run(); - p.cargo("build --offline --features=opt_dep") - .with_stderr( - "\ -[ERROR] failed to download `opt_dep v1.0.0` - -Caused by: - can't make HTTP request in the offline mode -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn cargo_compile_path_with_offline() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build --offline").run(); -} - -#[cargo_test] -fn cargo_compile_with_downloaded_dependency_with_offline() { - Package::new("present_dep", "1.2.3") - .file("Cargo.toml", &basic_manifest("present_dep", "1.2.3")) - .file("src/lib.rs", "") - .publish(); - - // make package downloaded - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - present_dep = "1.2.3" - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build").run(); - - let p2 = project() - .at("bar") - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - - [dependencies] - present_dep = "1.2.3" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p2.cargo("build --offline") - .with_stderr( - "\ -[COMPILING] present_dep v1.2.3 -[COMPILING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_offline_not_try_update() { - // When --offline needs to download the registry, provide a reasonable - // error hint to run without --offline. - let p = project() - .at("bar") - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - - [dependencies] - not_cached_dep = "1.2.5" - "#, - ) - .file("src/lib.rs", "") - .build(); - - let msg = "\ -[ERROR] no matching package named `not_cached_dep` found -location searched: registry `crates-io` -required by package `bar v0.1.0 ([..]/bar)` -As a reminder, you're using offline mode (--offline) which can sometimes cause \ -surprising resolution failures, if this error is too confusing you may wish to \ -retry without the offline flag. -"; - - p.cargo("build --offline") - .with_status(101) - .with_stderr(msg) - .run(); - - // While we're here, also check the config works. 
- p.change_file(".cargo/config", "net.offline = true"); - p.cargo("build").with_status(101).with_stderr(msg).run(); -} - -#[cargo_test] -fn compile_offline_without_maxvers_cached() { - Package::new("present_dep", "1.2.1").publish(); - Package::new("present_dep", "1.2.2").publish(); - - Package::new("present_dep", "1.2.3") - .file("Cargo.toml", &basic_manifest("present_dep", "1.2.3")) - .file( - "src/lib.rs", - r#"pub fn get_version()->&'static str {"1.2.3"}"#, - ) - .publish(); - - Package::new("present_dep", "1.2.5") - .file("Cargo.toml", &basic_manifest("present_dep", "1.2.5")) - .file("src/lib.rs", r#"pub fn get_version(){"1.2.5"}"#) - .publish(); - - // make package cached - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - present_dep = "=1.2.3" - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build").run(); - - let p2 = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - present_dep = "1.2" - "#, - ) - .file( - "src/main.rs", - "\ -extern crate present_dep; -fn main(){ - println!(\"{}\", present_dep::get_version()); -}", - ) - .build(); - - p2.cargo("run --offline") - .with_stderr( - "\ -[COMPILING] present_dep v1.2.3 -[COMPILING] foo v0.1.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] - Running `[..]`", - ) - .with_stdout("1.2.3") - .run(); -} - -#[cargo_test] -fn cargo_compile_forbird_git_httpsrepo_offline() { - let p = project() - .file( - "Cargo.toml", - r#" - - [project] - name = "foo" - version = "0.5.0" - authors = ["chabapok@example.com"] - - [dependencies.dep1] - git = 'https://github.com/some_user/dep1.git' - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("build --offline").with_status(101).with_stderr("\ -[ERROR] failed to get `dep1` as a dependency of package `foo v0.5.0 [..]` - -Caused by: - failed to load source for dependency `dep1` - -Caused by: - Unable to update https://github.com/some_user/dep1.git - -Caused by: - can't checkout from 'https://github.com/some_user/dep1.git': you are in the offline mode (--offline)").run(); -} - -#[cargo_test] -fn compile_offline_while_transitive_dep_not_cached() { - let baz = Package::new("baz", "1.0.0"); - let baz_path = baz.archive_dst(); - baz.publish(); - - let baz_content = fs::read(&baz_path).unwrap(); - // Truncate the file to simulate a download failure. - fs::write(&baz_path, &[]).unwrap(); - - Package::new("bar", "0.1.0").dep("baz", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/main.rs", "fn main(){}") - .build(); - - // simulate download bar, but fail to download baz - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..]failed to verify the checksum of `baz[..]") - .run(); - - // Restore the file contents. 
- fs::write(&baz_path, &baz_content).unwrap(); - - p.cargo("build --offline") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to download `bar v0.1.0` - -Caused by: - can't make HTTP request in the offline mode -", - ) - .run(); -} - -#[cargo_test] -fn update_offline_not_cached() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - p.cargo("update --offline") - .with_status(101) - .with_stderr( - "\ -[ERROR] no matching package named `bar` found -location searched: registry `[..]` -required by package `foo v0.0.1 ([..]/foo)` -As a reminder, you're using offline mode (--offline) which can sometimes cause \ -surprising resolution failures, if this error is too confusing you may wish to \ -retry without the offline flag.", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_offline_with_cached_git_dep() { - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_manifest("dep1", "0.5.0")) - .file( - "src/lib.rs", - r#" - pub static COOL_STR:&str = "cached git repo rev1"; - "#, - ) - }); - - let repo = git2::Repository::open(&git_project.root()).unwrap(); - let rev1 = repo.revparse_single("HEAD").unwrap().id(); - - // Commit the changes and make sure we trigger a recompile - git_project.change_file( - "src/lib.rs", - r#"pub static COOL_STR:&str = "cached git repo rev2";"#, - ); - git::add(&repo); - let rev2 = git::commit(&repo); - - // cache to registry rev1 and rev2 - let prj = project() - .at("cache_git_dep") - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "cache_git_dep" - version = "0.5.0" - - [dependencies.dep1] - git = '{}' - rev = "{}" - "#, - git_project.url(), - rev1 - ), - ) - .file("src/main.rs", "fn main(){}") - .build(); - prj.cargo("build").run(); - - prj.change_file( - "Cargo.toml", - &format!( - r#" - [project] - name = "cache_git_dep" - version = "0.5.0" - - [dependencies.dep1] - git = '{}' - rev = "{}" - "#, - git_project.url(), - rev2 - ), - ); - prj.cargo("build").run(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - - [dependencies.dep1] - git = '{}' - "#, - git_project.url() - ), - ) - .file( - "src/main.rs", - &main_file(r#""hello from {}", dep1::COOL_STR"#, &["dep1"]), - ) - .build(); - - let git_root = git_project.root(); - - p.cargo("build --offline") - .with_stderr(format!( - "\ -[COMPILING] dep1 v0.5.0 ({}#[..]) -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - path2url(git_root), - )) - .run(); - - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")) - .with_stdout("hello from cached git repo rev2\n") - .run(); - - p.change_file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.5.0" - - [dependencies.dep1] - git = '{}' - rev = "{}" - "#, - git_project.url(), - rev1 - ), - ); - - p.cargo("build --offline").run(); - p.process(&p.bin("foo")) - .with_stdout("hello from cached git repo rev1\n") - .run(); -} - -#[cargo_test] -fn offline_resolve_optional_fail() { - // Example where resolve fails offline. - // - // This happens if at least 1 version of an optional dependency is - // available, but none of them satisfy the requirements. The current logic - // that handles this is `RegistryIndex::query_inner`, and it doesn't know - // if the package being queried is an optional one. 
This is not ideal, it - // would be best if it just ignored optional (unselected) dependencies. - Package::new("dep", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = { version = "1.0", optional = true } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("fetch").run(); - - // Change dep to 2.0. - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = { version = "2.0", optional = true } - "#, - ); - - p.cargo("build --offline") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to select a version for the requirement `dep = \"^2.0\"` -candidate versions found which didn't match: 1.0.0 -location searched: `[..]` index (which is replacing registry `crates-io`) -required by package `foo v0.1.0 ([..]/foo)` -perhaps a crate was updated and forgotten to be re-vendored? -As a reminder, you're using offline mode (--offline) which can sometimes cause \ -surprising resolution failures, if this error is too confusing you may wish to \ -retry without the offline flag. -", - ) - .run(); -} - -#[cargo_test] -fn offline_with_all_patched() { - // Offline works if everything is patched. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = "1.0" - - [patch.crates-io] - dep = {path = "dep"} - "#, - ) - .file("src/lib.rs", "pub fn f() { dep::foo(); }") - .file("dep/Cargo.toml", &basic_manifest("dep", "1.0.0")) - .file("dep/src/lib.rs", "pub fn foo() {}") - .build(); - - p.cargo("check --offline").run(); -} - -#[cargo_test] -fn update_offline_cached() { - // Cache a few versions to update against - let p = project().file("src/lib.rs", "").build(); - let versions = ["1.2.3", "1.2.5", "1.2.9"]; - for vers in versions.iter() { - Package::new("present_dep", vers) - .file("Cargo.toml", &basic_manifest("present_dep", vers)) - .file( - "src/lib.rs", - format!(r#"pub fn get_version()->&'static str {{ "{}" }}"#, vers).as_str(), - ) - .publish(); - // make package cached - p.change_file( - "Cargo.toml", - format!( - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - present_dep = "={}" - "#, - vers - ) - .as_str(), - ); - p.cargo("build").run(); - } - - let p2 = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - present_dep = "1.2" - "#, - ) - .file( - "src/main.rs", - "\ -extern crate present_dep; -fn main(){ - println!(\"{}\", present_dep::get_version()); -}", - ) - .build(); - - p2.cargo("build --offline") - .with_stderr( - "\ -[COMPILING] present_dep v1.2.9 -[COMPILING] foo v0.1.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p2.rename_run("foo", "with_1_2_9") - .with_stdout("1.2.9") - .run(); - // updates happen without updating the index - p2.cargo("update -p present_dep --precise 1.2.3 --offline") - .with_status(0) - .with_stderr( - "\ -[UPDATING] present_dep v1.2.9 -> v1.2.3 -", - ) - .run(); - - p2.cargo("build --offline") - .with_stderr( - "\ -[COMPILING] present_dep v1.2.3 -[COMPILING] foo v0.1.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - p2.rename_run("foo", "with_1_2_3") - .with_stdout("1.2.3") - .run(); - - // Offline update should only print package details and not index updating - p2.cargo("update --offline") - .with_status(0) - .with_stderr( - "\ -[UPDATING] present_dep v1.2.3 -> v1.2.9 -", - ) - .run(); - - // No v1.2.8 loaded into the cache so expect failure. - p2.cargo("update -p present_dep --precise 1.2.8 --offline") - .with_status(101) - .with_stderr( - "\ -[ERROR] no matching package named `present_dep` found -location searched: registry `[..]` -required by package `foo v0.1.0 ([..]/foo)` -As a reminder, you're using offline mode (--offline) which can sometimes cause \ -surprising resolution failures, if this error is too confusing you may wish to \ -retry without the offline flag. -", - ) - .run(); -} - -#[cargo_test] -fn offline_and_frozen_and_no_lock() { - let p = project().file("src/lib.rs", "").build(); - p.cargo("build --frozen --offline") - .with_status(101) - .with_stderr("\ -error: the lock file [ROOT]/foo/Cargo.lock needs to be updated but --frozen was passed to prevent this -If you want to try to generate the lock file without accessing the network, \ -remove the --frozen flag and use --offline instead. -") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/old_cargos.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/old_cargos.rs deleted file mode 100644 index 10179bc2b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/old_cargos.rs +++ /dev/null @@ -1,646 +0,0 @@ -//! Tests for checking behavior of old cargos. -//! -//! These tests are ignored because it is intended to be run on a developer -//! system with a bunch of toolchains installed. This requires `rustup` to be -//! installed. It will iterate over installed toolchains, and run some tests -//! over each one, producing a report at the end. As of this writing, I have -//! tested 1.0 to 1.51. Run this with: -//! -//! ```console -//! cargo test --test testsuite -- old_cargos --nocapture --ignored -//! ``` - -use cargo::CargoResult; -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::{self, Dependency, Package}; -use cargo_test_support::{cargo_exe, execs, paths, process, project, rustc_host}; -use cargo_util::{ProcessBuilder, ProcessError}; -use semver::Version; -use std::fs; - -fn tc_process(cmd: &str, toolchain: &str) -> ProcessBuilder { - let mut p = if toolchain == "this" { - if cmd == "cargo" { - process(&cargo_exe()) - } else { - process(cmd) - } - } else { - let mut cmd = process(cmd); - cmd.arg(format!("+{}", toolchain)); - cmd - }; - // Reset PATH since `process` modifies it to remove rustup. - p.env("PATH", std::env::var_os("PATH").unwrap()); - p -} - -/// Returns a sorted list of all toolchains. -/// -/// The returned value includes the parsed version, and the rustup toolchain -/// name as a string. -fn collect_all_toolchains() -> Vec<(Version, String)> { - let rustc_version = |tc| { - let mut cmd = tc_process("rustc", tc); - cmd.arg("-V"); - let output = cmd.exec_with_output().expect("rustc installed"); - let version = std::str::from_utf8(&output.stdout).unwrap(); - let parts: Vec<_> = version.split_whitespace().collect(); - assert_eq!(parts[0], "rustc"); - assert!(parts[1].starts_with("1.")); - Version::parse(parts[1]).expect("valid version") - }; - - // Provide a way to override the list. 
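The `rustc_version` closure above turns `rustc -V` output into a `semver::Version` by splitting on whitespace and parsing the second token. Below is a standalone sketch of that parsing step (not taken from the cargo-0.60.0 sources); the sample output string is only an illustration of the usual `rustc <version> (<hash> <date>)` shape.

```rust
use semver::Version;

/// Parse the version out of `rustc -V` style output.
/// Sketch only; mirrors the `rustc_version` closure in the deleted test.
fn parse_rustc_version(output: &str) -> Version {
    let parts: Vec<&str> = output.split_whitespace().collect();
    assert_eq!(parts[0], "rustc");
    assert!(parts[1].starts_with("1."));
    Version::parse(parts[1]).expect("valid version")
}

fn main() {
    // The output string here is illustrative only.
    let v = parse_rustc_version("rustc 1.51.0 (2fd73fabe 2021-03-23)");
    assert_eq!(v, Version::new(1, 51, 0));
}
```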
- if let Ok(tcs) = std::env::var("OLD_CARGO") { - return tcs - .split(',') - .map(|tc| (rustc_version(tc), tc.to_string())) - .collect(); - } - - let host = rustc_host(); - // I tend to have lots of toolchains installed, but I don't want to test - // all of them (like dated nightlies, or toolchains for non-host targets). - let valid_names = &[ - format!("stable-{}", host), - format!("beta-{}", host), - format!("nightly-{}", host), - ]; - - let output = ProcessBuilder::new("rustup") - .args(&["toolchain", "list"]) - .exec_with_output() - .expect("rustup should be installed"); - let stdout = std::str::from_utf8(&output.stdout).unwrap(); - let mut toolchains: Vec<_> = stdout - .lines() - .map(|line| { - // Some lines say things like (default), just get the version. - line.split_whitespace().next().expect("non-empty line") - }) - .filter(|line| { - line.ends_with(&host) - && (line.starts_with("1.") || valid_names.iter().any(|name| name == line)) - }) - .map(|line| (rustc_version(line), line.to_string())) - .collect(); - - // Also include *this* cargo. - toolchains.push((rustc_version("this"), "this".to_string())); - toolchains.sort_by(|a, b| a.0.cmp(&b.0)); - toolchains -} - -// This is a test for exercising the behavior of older versions of cargo with -// the new feature syntax. -// -// The test involves a few dependencies with different feature requirements: -// -// * `bar` 1.0.0 is the base version that does not use the new syntax. -// * `bar` 1.0.1 has a feature with the new syntax, but the feature is unused. -// The optional dependency `new-baz-dep` should not be activated. -// * `bar` 1.0.2 has a dependency on `baz` that *requires* the new feature -// syntax. -#[ignore] -#[cargo_test] -fn new_features() { - if std::process::Command::new("rustup").output().is_err() { - panic!("old_cargos requires rustup to be installed"); - } - Package::new("new-baz-dep", "1.0.0").publish(); - - Package::new("baz", "1.0.0").publish(); - let baz101_cksum = Package::new("baz", "1.0.1") - .add_dep(Dependency::new("new-baz-dep", "1.0").optional(true)) - .feature("new-feat", &["dep:new-baz-dep"]) - .publish(); - - let bar100_cksum = Package::new("bar", "1.0.0") - .add_dep(Dependency::new("baz", "1.0").optional(true)) - .feature("feat", &["baz"]) - .publish(); - let bar101_cksum = Package::new("bar", "1.0.1") - .add_dep(Dependency::new("baz", "1.0").optional(true)) - .feature("feat", &["dep:baz"]) - .publish(); - let bar102_cksum = Package::new("bar", "1.0.2") - .add_dep(Dependency::new("baz", "1.0").enable_features(&["new-feat"])) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - let lock_bar_to = |toolchain_version: &Version, bar_version| { - let lock = if toolchain_version < &Version::new(1, 12, 0) { - let url = registry::registry_url(); - match bar_version { - 100 => format!( - r#" - [root] - name = "foo" - version = "0.1.0" - dependencies = [ - "bar 1.0.0 (registry+{url})", - ] - - [[package]] - name = "bar" - version = "1.0.0" - source = "registry+{url}" - "#, - url = url - ), - 101 => format!( - r#" - [root] - name = "foo" - version = "0.1.0" - dependencies = [ - "bar 1.0.1 (registry+{url})", - ] - - [[package]] - name = "bar" - version = "1.0.1" - source = "registry+{url}" - "#, - url = url - ), - 102 => format!( - r#" - [root] - name = "foo" - version = "0.1.0" - dependencies = [ - "bar 1.0.2 (registry+{url})", - ] - - [[package]] - name = "bar" - version 
= "1.0.2" - source = "registry+{url}" - dependencies = [ - "baz 1.0.1 (registry+{url})", - ] - - [[package]] - name = "baz" - version = "1.0.1" - source = "registry+{url}" - "#, - url = url - ), - _ => panic!("unexpected version"), - } - } else { - match bar_version { - 100 => format!( - r#" - [root] - name = "foo" - version = "0.1.0" - dependencies = [ - "bar 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - ] - - [[package]] - name = "bar" - version = "1.0.0" - source = "registry+https://github.com/rust-lang/crates.io-index" - - [metadata] - "checksum bar 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "{}" - "#, - bar100_cksum - ), - 101 => format!( - r#" - [root] - name = "foo" - version = "0.1.0" - dependencies = [ - "bar 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - ] - - [[package]] - name = "bar" - version = "1.0.1" - source = "registry+https://github.com/rust-lang/crates.io-index" - - [metadata] - "checksum bar 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "{}" - "#, - bar101_cksum - ), - 102 => format!( - r#" - [root] - name = "foo" - version = "0.1.0" - dependencies = [ - "bar 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - ] - - [[package]] - name = "bar" - version = "1.0.2" - source = "registry+https://github.com/rust-lang/crates.io-index" - dependencies = [ - "baz 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - ] - - [[package]] - name = "baz" - version = "1.0.1" - source = "registry+https://github.com/rust-lang/crates.io-index" - - [metadata] - "checksum bar 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "{bar102_cksum}" - "checksum baz 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "{baz101_cksum}" - "#, - bar102_cksum = bar102_cksum, - baz101_cksum = baz101_cksum - ), - _ => panic!("unexpected version"), - } - }; - p.change_file("Cargo.lock", &lock); - }; - - let toolchains = collect_all_toolchains(); - - let config_path = paths::home().join(".cargo/config"); - let lock_path = p.root().join("Cargo.lock"); - - struct ToolchainBehavior { - bar: Option, - baz: Option, - new_baz_dep: Option, - } - - // Collect errors to print at the end. One entry per toolchain, a list of - // strings to print. - let mut unexpected_results: Vec> = Vec::new(); - - for (version, toolchain) in &toolchains { - let mut tc_result = Vec::new(); - // Write a config appropriate for this version. - if version < &Version::new(1, 12, 0) { - fs::write( - &config_path, - format!( - r#" - [registry] - index = "{}" - "#, - registry::registry_url() - ), - ) - .unwrap(); - } else { - fs::write( - &config_path, - format!( - " - [source.crates-io] - registry = 'https://wut' # only needed by 1.12 - replace-with = 'dummy-registry' - - [source.dummy-registry] - registry = '{}' - ", - registry::registry_url() - ), - ) - .unwrap(); - } - - // Fetches the version of a package in the lock file. - let pkg_version = |pkg| -> Option { - let output = tc_process("cargo", toolchain) - .args(&["pkgid", pkg]) - .cwd(p.root()) - .exec_with_output() - .ok()?; - let stdout = std::str::from_utf8(&output.stdout).unwrap(); - let version = stdout - .trim() - .rsplitn(2, ':') - .next() - .expect("version after colon"); - Some(Version::parse(version).expect("parseable version")) - }; - - // Runs `cargo build` and returns the versions selected in the lock. 
- let run_cargo = || -> CargoResult { - match tc_process("cargo", toolchain) - .args(&["build", "--verbose"]) - .cwd(p.root()) - .exec_with_output() - { - Ok(_output) => { - eprintln!("{} ok", toolchain); - let bar = pkg_version("bar"); - let baz = pkg_version("baz"); - let new_baz_dep = pkg_version("new-baz-dep"); - Ok(ToolchainBehavior { - bar, - baz, - new_baz_dep, - }) - } - Err(e) => { - eprintln!("{} err {}", toolchain, e); - Err(e) - } - } - }; - - macro_rules! check_lock { - ($tc_result:ident, $pkg:expr, $which:expr, $actual:expr, None) => { - check_lock!(= $tc_result, $pkg, $which, $actual, None); - }; - ($tc_result:ident, $pkg:expr, $which:expr, $actual:expr, $expected:expr) => { - check_lock!(= $tc_result, $pkg, $which, $actual, Some(Version::parse($expected).unwrap())); - }; - (= $tc_result:ident, $pkg:expr, $which:expr, $actual:expr, $expected:expr) => { - let exp: Option = $expected; - if $actual != $expected { - $tc_result.push(format!( - "{} for {} saw {:?} but expected {:?}", - $which, $pkg, $actual, exp - )); - } - }; - } - - let check_err_contains = |tc_result: &mut Vec<_>, err: anyhow::Error, contents| { - if let Some(ProcessError { - stderr: Some(stderr), - .. - }) = err.downcast_ref::() - { - let stderr = std::str::from_utf8(stderr).unwrap(); - if !stderr.contains(contents) { - tc_result.push(format!( - "{} expected to see error contents:\n{}\nbut saw:\n{}", - toolchain, contents, stderr - )); - } - } else { - panic!("{} unexpected error {}", toolchain, err); - } - }; - - // Unlocked behavior. - let which = "unlocked"; - lock_path.rm_rf(); - p.build_dir().rm_rf(); - match run_cargo() { - Ok(behavior) => { - // TODO: Switch to 51 after backport. - if version < &Version::new(1, 52, 0) && toolchain != "this" { - check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2"); - check_lock!(tc_result, "baz", which, behavior.baz, "1.0.1"); - check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None); - } else { - check_lock!(tc_result, "bar", which, behavior.bar, "1.0.0"); - check_lock!(tc_result, "baz", which, behavior.baz, None); - check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None); - } - } - Err(e) => { - tc_result.push(format!("unlocked build failed: {}", e)); - } - } - - let which = "locked bar 1.0.0"; - lock_bar_to(version, 100); - match run_cargo() { - Ok(behavior) => { - check_lock!(tc_result, "bar", which, behavior.bar, "1.0.0"); - check_lock!(tc_result, "baz", which, behavior.baz, None); - check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None); - } - Err(e) => { - tc_result.push(format!("bar 1.0.0 locked build failed: {}", e)); - } - } - - let which = "locked bar 1.0.1"; - lock_bar_to(version, 101); - match run_cargo() { - Ok(behavior) => { - check_lock!(tc_result, "bar", which, behavior.bar, "1.0.1"); - check_lock!(tc_result, "baz", which, behavior.baz, None); - check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None); - } - Err(e) => { - if toolchain == "this" { - // 1.0.1 can't be used without -Znamespaced-features - // It gets filtered out of the index. 
- check_err_contains(&mut tc_result, e, - "error: failed to select a version for the requirement `bar = \"=1.0.1\"`\n\ - candidate versions found which didn't match: 1.0.2, 1.0.0" - ); - } else { - tc_result.push(format!("bar 1.0.1 locked build failed: {}", e)); - } - } - } - - let which = "locked bar 1.0.2"; - lock_bar_to(version, 102); - match run_cargo() { - Ok(behavior) => { - check_lock!(tc_result, "bar", which, behavior.bar, "1.0.2"); - check_lock!(tc_result, "baz", which, behavior.baz, "1.0.1"); - check_lock!(tc_result, "new-baz-dep", which, behavior.new_baz_dep, None); - } - Err(e) => { - if toolchain == "this" { - // baz can't lock to 1.0.1, it requires -Znamespaced-features - check_err_contains(&mut tc_result, e, - "error: failed to select a version for the requirement `baz = \"=1.0.1\"`\n\ - candidate versions found which didn't match: 1.0.0" - ); - } else { - tc_result.push(format!("bar 1.0.2 locked build failed: {}", e)); - } - } - } - - unexpected_results.push(tc_result); - } - - // Generate a report. - let mut has_err = false; - for ((tc_vers, tc_name), errs) in toolchains.iter().zip(unexpected_results) { - if errs.is_empty() { - continue; - } - eprintln!("error: toolchain {} (version {}):", tc_name, tc_vers); - for err in errs { - eprintln!(" {}", err); - } - has_err = true; - } - if has_err { - panic!("at least one toolchain did not run as expected"); - } -} - -#[cargo_test] -#[ignore] -fn index_cache_rebuild() { - // Checks that the index cache gets rebuilt. - // - // 1.48 will not cache entries with features with the same name as a - // dependency. If the cache does not get rebuilt, then running with - // `-Znamespaced-features` would prevent the new cargo from seeing those - // entries. The index cache version was changed to prevent this from - // happening, and switching between versions should work correctly - // (although it will thrash the cash, that's better than not working - // correctly. - Package::new("baz", "1.0.0").publish(); - Package::new("bar", "1.0.0").publish(); - Package::new("bar", "1.0.1") - .add_dep(Dependency::new("baz", "1.0").optional(true)) - .feature("baz", &["dep:baz"]) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - // This version of Cargo errors on index entries that have overlapping - // feature names, so 1.0.1 will be missing. - execs() - .with_process_builder(tc_process("cargo", "1.48.0")) - .arg("check") - .cwd(p.root()) - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v1.0.0 [..] -[CHECKING] bar v1.0.0 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - - fs::remove_file(p.root().join("Cargo.lock")).unwrap(); - - // This should rebuild the cache and use 1.0.1. - p.cargo("check -Znamespaced-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v1.0.1 [..] -[CHECKING] bar v1.0.1 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - - fs::remove_file(p.root().join("Cargo.lock")).unwrap(); - - // Verify 1.48 can still resolve, and is at 1.0.0. - execs() - .with_process_builder(tc_process("cargo", "1.48.0")) - .arg("tree") - .cwd(p.root()) - .with_stdout( - "\ -foo v0.1.0 [..] 
-โ””โ”€โ”€ bar v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -#[ignore] -fn avoids_split_debuginfo_collision() { - // Checks for a bug where .o files were being incorrectly shared between - // different toolchains using incremental and split-debuginfo on macOS. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.dev] - split-debuginfo = "unpacked" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - execs() - .with_process_builder(tc_process("cargo", "stable")) - .arg("build") - .env("CARGO_INCREMENTAL", "1") - .cwd(p.root()) - .with_stderr( - "\ -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - - p.cargo("build") - .env("CARGO_INCREMENTAL", "1") - .with_stderr( - "\ -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - - execs() - .with_process_builder(tc_process("cargo", "stable")) - .arg("build") - .env("CARGO_INCREMENTAL", "1") - .cwd(p.root()) - .with_stderr( - "\ -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/out_dir.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/out_dir.rs deleted file mode 100644 index 300245fe8..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/out_dir.rs +++ /dev/null @@ -1,317 +0,0 @@ -//! Tests for --out-dir flag. - -use cargo_test_support::sleep_ms; -use cargo_test_support::{basic_manifest, project}; -use std::env; -use std::fs; -use std::path::Path; - -#[cargo_test] -fn binary_with_debug() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#) - .build(); - - p.cargo("build -Z unstable-options --out-dir out") - .masquerade_as_nightly_cargo() - .enable_mac_dsym() - .run(); - check_dir_contents( - &p.root().join("out"), - &["foo"], - &["foo", "foo.dSYM"], - &["foo.exe", "foo.pdb"], - &["foo.exe"], - ); -} - -#[cargo_test] -fn static_library_with_debug() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - crate-type = ["staticlib"] - "#, - ) - .file( - "src/lib.rs", - r#" - #[no_mangle] - pub extern "C" fn foo() { println!("Hello, World!") } - "#, - ) - .build(); - - p.cargo("build -Z unstable-options --out-dir out") - .masquerade_as_nightly_cargo() - .run(); - check_dir_contents( - &p.root().join("out"), - &["libfoo.a"], - &["libfoo.a"], - &["foo.lib"], - &["libfoo.a"], - ); -} - -#[cargo_test] -fn dynamic_library_with_debug() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - crate-type = ["cdylib"] - "#, - ) - .file( - "src/lib.rs", - r#" - #[no_mangle] - pub extern "C" fn foo() { println!("Hello, World!") } - "#, - ) - .build(); - - p.cargo("build -Z unstable-options --out-dir out") - .masquerade_as_nightly_cargo() - .enable_mac_dsym() - .run(); - check_dir_contents( - &p.root().join("out"), - &["libfoo.so"], - &["libfoo.dylib", "libfoo.dylib.dSYM"], - &["foo.dll", "foo.dll.exp", "foo.dll.lib", "foo.pdb"], - &["foo.dll", "libfoo.dll.a"], - ); -} - -#[cargo_test] -fn rlib_with_debug() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - crate-type = ["rlib"] - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn foo() { println!("Hello, World!") } - "#, - ) - .build(); - - p.cargo("build -Z unstable-options --out-dir out") - .masquerade_as_nightly_cargo() - .run(); - 
check_dir_contents( - &p.root().join("out"), - &["libfoo.rlib"], - &["libfoo.rlib"], - &["libfoo.rlib"], - &["libfoo.rlib"], - ); -} - -#[cargo_test] -fn include_only_the_binary_from_the_current_package() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [workspace] - - [dependencies] - utils = { path = "./utils" } - "#, - ) - .file("src/lib.rs", "extern crate utils;") - .file( - "src/main.rs", - r#" - extern crate foo; - extern crate utils; - fn main() { - println!("Hello, World!") - } - "#, - ) - .file("utils/Cargo.toml", &basic_manifest("utils", "0.0.1")) - .file("utils/src/lib.rs", "") - .build(); - - p.cargo("build -Z unstable-options --bin foo --out-dir out") - .masquerade_as_nightly_cargo() - .enable_mac_dsym() - .run(); - check_dir_contents( - &p.root().join("out"), - &["foo"], - &["foo", "foo.dSYM"], - &["foo.exe", "foo.pdb"], - &["foo.exe"], - ); -} - -#[cargo_test] -fn out_dir_is_a_file() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#) - .file("out", "") - .build(); - - p.cargo("build -Z unstable-options --out-dir out") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr_contains("[ERROR] failed to create directory [..]") - .run(); -} - -#[cargo_test] -fn replaces_artifacts() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("foo") }"#) - .build(); - - p.cargo("build -Z unstable-options --out-dir out") - .masquerade_as_nightly_cargo() - .run(); - p.process( - &p.root() - .join(&format!("out/foo{}", env::consts::EXE_SUFFIX)), - ) - .with_stdout("foo") - .run(); - - sleep_ms(1000); - p.change_file("src/main.rs", r#"fn main() { println!("bar") }"#); - - p.cargo("build -Z unstable-options --out-dir out") - .masquerade_as_nightly_cargo() - .run(); - p.process( - &p.root() - .join(&format!("out/foo{}", env::consts::EXE_SUFFIX)), - ) - .with_stdout("bar") - .run(); -} - -#[cargo_test] -fn avoid_build_scripts() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/main.rs", "fn main() {}") - .file("a/build.rs", r#"fn main() { println!("hello-build-a"); }"#) - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/main.rs", "fn main() {}") - .file("b/build.rs", r#"fn main() { println!("hello-build-b"); }"#) - .build(); - - p.cargo("build -Z unstable-options --out-dir out -vv") - .masquerade_as_nightly_cargo() - .enable_mac_dsym() - .with_stdout_contains("[a 0.0.1] hello-build-a") - .with_stdout_contains("[b 0.0.1] hello-build-b") - .run(); - check_dir_contents( - &p.root().join("out"), - &["a", "b"], - &["a", "a.dSYM", "b", "b.dSYM"], - &["a.exe", "a.pdb", "b.exe", "b.pdb"], - &["a.exe", "b.exe"], - ); -} - -#[cargo_test] -fn cargo_build_out_dir() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("Hello, World!") }"#) - .file( - ".cargo/config", - r#" - [build] - out-dir = "out" - "#, - ) - .build(); - - p.cargo("build -Z unstable-options") - .masquerade_as_nightly_cargo() - .enable_mac_dsym() - .run(); - check_dir_contents( - &p.root().join("out"), - &["foo"], - &["foo", "foo.dSYM"], - &["foo.exe", "foo.pdb"], - &["foo.exe"], - ); -} - -fn check_dir_contents( - out_dir: &Path, - expected_linux: &[&str], - expected_mac: &[&str], - expected_win_msvc: &[&str], - expected_win_gnu: &[&str], -) { - let expected = if cfg!(target_os = "windows") { - if cfg!(target_env = "msvc") { - expected_win_msvc - } 
else { - expected_win_gnu - } - } else if cfg!(target_os = "macos") { - expected_mac - } else { - expected_linux - }; - - let actual = list_dir(out_dir); - let mut expected = expected.iter().map(|s| s.to_string()).collect::>(); - expected.sort_unstable(); - assert_eq!(actual, expected); -} - -fn list_dir(dir: &Path) -> Vec { - let mut res = Vec::new(); - for entry in fs::read_dir(dir).unwrap() { - let entry = entry.unwrap(); - res.push(entry.file_name().into_string().unwrap()); - } - res.sort_unstable(); - res -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/owner.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/owner.rs deleted file mode 100644 index 8c4bcbe17..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/owner.rs +++ /dev/null @@ -1,123 +0,0 @@ -//! Tests for the `cargo owner` command. - -use std::fs; - -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::project; -use cargo_test_support::registry::{self, api_path}; - -fn setup(name: &str, content: Option<&str>) { - let dir = api_path().join(format!("api/v1/crates/{}", name)); - dir.mkdir_p(); - if let Some(body) = content { - fs::write(dir.join("owners"), body).unwrap(); - } -} - -#[cargo_test] -fn simple_list() { - registry::init(); - let content = r#"{ - "users": [ - { - "id": 70, - "login": "github:rust-lang:core", - "name": "Core" - }, - { - "id": 123, - "login": "octocat" - } - ] - }"#; - setup("foo", Some(content)); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("owner -l --token sekrit") - .with_stdout( - "\ -github:rust-lang:core (Core) -octocat -", - ) - .run(); -} - -#[cargo_test] -fn simple_add() { - registry::init(); - setup("foo", None); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("owner -a username --token sekrit") - .with_status(101) - .with_stderr( - " Updating `[..]` index -error: failed to invite owners to crate `foo` on registry at file://[..] - -Caused by: - EOF while parsing a value at line 1 column 0", - ) - .run(); -} - -#[cargo_test] -fn simple_remove() { - registry::init(); - setup("foo", None); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("owner -r username --token sekrit") - .with_status(101) - .with_stderr( - " Updating `[..]` index - Owner removing [\"username\"] from crate foo -error: failed to remove owners from crate `foo` on registry at file://[..] - -Caused by: - EOF while parsing a value at line 1 column 0", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/package.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/package.rs deleted file mode 100644 index 054189a08..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/package.rs +++ /dev/null @@ -1,2209 +0,0 @@ -//! Tests for the `cargo package` command. 
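The tests below verify `.crate` archives with `validate_crate_contents`, and the file imports `flate2::read::GzDecoder` and `tar::Archive` for that purpose. A packaged `.crate` is a gzip-compressed tar, so its entries can be listed as in the rough sketch below (not the helper's actual implementation, and not taken from the cargo-0.60.0 sources; it only assumes the `flate2` and `tar` crates already imported by this file).

```rust
use flate2::read::GzDecoder;
use std::fs::File;
use tar::Archive;

/// List the paths inside a `.crate` file (a gzipped tar archive).
/// Sketch only; `validate_crate_contents` in cargo's test support performs
/// the real comparison against an expected file list.
fn list_crate_contents(path: &str) -> std::io::Result<Vec<String>> {
    let file = File::open(path)?;
    let mut archive = Archive::new(GzDecoder::new(file));
    let mut names = Vec::new();
    for entry in archive.entries()? {
        let entry = entry?;
        names.push(entry.path()?.display().to_string());
    }
    names.sort();
    Ok(names)
}

fn main() -> std::io::Result<()> {
    // Path is illustrative; the tests below produce target/package/foo-0.0.1.crate.
    for name in list_crate_contents("target/package/foo-0.0.1.crate")? {
        println!("{}", name);
    }
    Ok(())
}
```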
- -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::publish::validate_crate_contents; -use cargo_test_support::registry::{self, Package}; -use cargo_test_support::{ - basic_manifest, cargo_process, git, path2url, paths, project, symlink_supported, t, -}; -use flate2::read::GzDecoder; -use std::fs::{self, read_to_string, File}; -use std::path::Path; -use tar::Archive; - -#[cargo_test] -fn simple() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - exclude = ["*.txt"] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .file("src/bar.txt", "") // should be ignored when packaging - .build(); - - p.cargo("package") - .with_stderr( - "\ -[WARNING] manifest has no documentation[..] -See [..] -[PACKAGING] foo v0.0.1 ([CWD]) -[VERIFYING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); - p.cargo("package -l") - .with_stdout( - "\ -Cargo.lock -Cargo.toml -Cargo.toml.orig -src/main.rs -", - ) - .run(); - p.cargo("package").with_stdout("").run(); - - let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); - validate_crate_contents( - f, - "foo-0.0.1.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - &[], - ); -} - -#[cargo_test] -fn metadata_warning() { - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("package") - .with_stderr( - "\ -warning: manifest has no description, license, license-file, documentation, \ -homepage or repository. -See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. -[PACKAGING] foo v0.0.1 ([CWD]) -[VERIFYING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - p.cargo("package") - .with_stderr( - "\ -warning: manifest has no description, documentation, homepage or repository. -See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. -[PACKAGING] foo v0.0.1 ([CWD]) -[VERIFYING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - repository = "bar" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - p.cargo("package") - .with_stderr( - "\ -[PACKAGING] foo v0.0.1 ([CWD]) -[VERIFYING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn package_verbose() { - let root = paths::root().join("all"); - let repo = git::repo(&root) - .file("Cargo.toml", &basic_manifest("foo", "0.0.1")) - .file("src/main.rs", "fn main() {}") - .file("a/a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/a/src/lib.rs", "") - .build(); - cargo_process("build").cwd(repo.root()).run(); - - println!("package main repo"); - cargo_process("package -v --no-verify") - .cwd(repo.root()) - .with_stderr( - "\ -[WARNING] manifest has no description[..] 
-See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. -[PACKAGING] foo v0.0.1 ([..]) -[ARCHIVING] .cargo_vcs_info.json -[ARCHIVING] Cargo.lock -[ARCHIVING] Cargo.toml -[ARCHIVING] Cargo.toml.orig -[ARCHIVING] src/main.rs -", - ) - .run(); - - let f = File::open(&repo.root().join("target/package/foo-0.0.1.crate")).unwrap(); - let vcs_contents = format!( - r#"{{ - "git": {{ - "sha1": "{}" - }}, - "path_in_vcs": "" -}} -"#, - repo.revparse_head() - ); - validate_crate_contents( - f, - "foo-0.0.1.crate", - &[ - "Cargo.lock", - "Cargo.toml", - "Cargo.toml.orig", - "src/main.rs", - ".cargo_vcs_info.json", - ], - &[(".cargo_vcs_info.json", &vcs_contents)], - ); - - println!("package sub-repo"); - cargo_process("package -v --no-verify") - .cwd(repo.root().join("a/a")) - .with_stderr( - "\ -[WARNING] manifest has no description[..] -See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. -[PACKAGING] a v0.0.1 ([..]) -[ARCHIVING] .cargo_vcs_info.json -[ARCHIVING] Cargo.toml -[ARCHIVING] Cargo.toml.orig -[ARCHIVING] src/lib.rs -", - ) - .run(); - - let f = File::open(&repo.root().join("a/a/target/package/a-0.0.1.crate")).unwrap(); - let vcs_contents = format!( - r#"{{ - "git": {{ - "sha1": "{}" - }}, - "path_in_vcs": "a/a" -}} -"#, - repo.revparse_head() - ); - validate_crate_contents( - f, - "a-0.0.1.crate", - &[ - "Cargo.toml", - "Cargo.toml.orig", - "src/lib.rs", - ".cargo_vcs_info.json", - ], - &[(".cargo_vcs_info.json", &vcs_contents)], - ); -} - -#[cargo_test] -fn package_verification() { - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("build").run(); - p.cargo("package") - .with_stderr( - "\ -[WARNING] manifest has no description[..] -See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. -[PACKAGING] foo v0.0.1 ([CWD]) -[VERIFYING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn vcs_file_collision() { - let p = project().build(); - let _ = git::repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - description = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - documentation = "foo" - homepage = "foo" - repository = "foo" - exclude = ["*.no-existe"] - "#, - ) - .file( - "src/main.rs", - r#" - fn main() {} - "#, - ) - .file(".cargo_vcs_info.json", "foo") - .build(); - p.cargo("package") - .arg("--no-verify") - .with_status(101) - .with_stderr( - "\ -[ERROR] invalid inclusion of reserved file name .cargo_vcs_info.json \ -in package source -", - ) - .run(); -} - -#[cargo_test] -fn path_dependency_no_version() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("package") - .with_status(101) - .with_stderr( - "\ -[WARNING] manifest has no documentation, homepage or repository. -See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. -[ERROR] all dependencies must have a version specified when packaging. 
-dependency `bar` does not specify a version\n\ -Note: The packaged dependency will use the version from crates.io, -the `path` specification will be removed from the dependency declaration. -", - ) - .run(); -} - -#[cargo_test] -fn git_dependency_no_version() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [dependencies.foo] - git = "git://path/to/nowhere" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("package") - .with_status(101) - .with_stderr( - "\ -[WARNING] manifest has no documentation, homepage or repository. -See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. -[ERROR] all dependencies must have a version specified when packaging. -dependency `foo` does not specify a version -Note: The packaged dependency will use the version from crates.io, -the `git` specification will be removed from the dependency declaration. -", - ) - .run(); -} - -#[cargo_test] -fn exclude() { - let root = paths::root().join("exclude"); - let repo = git::repo(&root) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - exclude = [ - "*.txt", - # file in root - "file_root_1", # NO_CHANGE (ignored) - "/file_root_2", # CHANGING (packaged -> ignored) - "file_root_3/", # NO_CHANGE (packaged) - "file_root_4/*", # NO_CHANGE (packaged) - "file_root_5/**", # NO_CHANGE (packaged) - # file in sub-dir - "file_deep_1", # CHANGING (packaged -> ignored) - "/file_deep_2", # NO_CHANGE (packaged) - "file_deep_3/", # NO_CHANGE (packaged) - "file_deep_4/*", # NO_CHANGE (packaged) - "file_deep_5/**", # NO_CHANGE (packaged) - # dir in root - "dir_root_1", # CHANGING (packaged -> ignored) - "/dir_root_2", # CHANGING (packaged -> ignored) - "dir_root_3/", # CHANGING (packaged -> ignored) - "dir_root_4/*", # NO_CHANGE (ignored) - "dir_root_5/**", # NO_CHANGE (ignored) - # dir in sub-dir - "dir_deep_1", # CHANGING (packaged -> ignored) - "/dir_deep_2", # NO_CHANGE - "dir_deep_3/", # CHANGING (packaged -> ignored) - "dir_deep_4/*", # CHANGING (packaged -> ignored) - "dir_deep_5/**", # CHANGING (packaged -> ignored) - ] - "#, - ) - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .file("bar.txt", "") - .file("src/bar.txt", "") - // File in root. - .file("file_root_1", "") - .file("file_root_2", "") - .file("file_root_3", "") - .file("file_root_4", "") - .file("file_root_5", "") - // File in sub-dir. - .file("some_dir/file_deep_1", "") - .file("some_dir/file_deep_2", "") - .file("some_dir/file_deep_3", "") - .file("some_dir/file_deep_4", "") - .file("some_dir/file_deep_5", "") - // Dir in root. - .file("dir_root_1/some_dir/file", "") - .file("dir_root_2/some_dir/file", "") - .file("dir_root_3/some_dir/file", "") - .file("dir_root_4/some_dir/file", "") - .file("dir_root_5/some_dir/file", "") - // Dir in sub-dir. - .file("some_dir/dir_deep_1/some_dir/file", "") - .file("some_dir/dir_deep_2/some_dir/file", "") - .file("some_dir/dir_deep_3/some_dir/file", "") - .file("some_dir/dir_deep_4/some_dir/file", "") - .file("some_dir/dir_deep_5/some_dir/file", "") - .build(); - - cargo_process("package --no-verify -v") - .cwd(repo.root()) - .with_stdout("") - .with_stderr( - "\ -[WARNING] manifest has no description[..] -See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. 
-[PACKAGING] foo v0.0.1 ([..]) -[ARCHIVING] .cargo_vcs_info.json -[ARCHIVING] Cargo.lock -[ARCHIVING] Cargo.toml -[ARCHIVING] Cargo.toml.orig -[ARCHIVING] file_root_3 -[ARCHIVING] file_root_4 -[ARCHIVING] file_root_5 -[ARCHIVING] some_dir/dir_deep_2/some_dir/file -[ARCHIVING] some_dir/dir_deep_4/some_dir/file -[ARCHIVING] some_dir/dir_deep_5/some_dir/file -[ARCHIVING] some_dir/file_deep_2 -[ARCHIVING] some_dir/file_deep_3 -[ARCHIVING] some_dir/file_deep_4 -[ARCHIVING] some_dir/file_deep_5 -[ARCHIVING] src/main.rs -", - ) - .run(); - - assert!(repo.root().join("target/package/foo-0.0.1.crate").is_file()); - - cargo_process("package -l") - .cwd(repo.root()) - .with_stdout( - "\ -.cargo_vcs_info.json -Cargo.lock -Cargo.toml -Cargo.toml.orig -file_root_3 -file_root_4 -file_root_5 -some_dir/dir_deep_2/some_dir/file -some_dir/dir_deep_4/some_dir/file -some_dir/dir_deep_5/some_dir/file -some_dir/file_deep_2 -some_dir/file_deep_3 -some_dir/file_deep_4 -some_dir/file_deep_5 -src/main.rs -", - ) - .run(); -} - -#[cargo_test] -fn include() { - let root = paths::root().join("include"); - let repo = git::repo(&root) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - exclude = ["*.txt"] - include = ["foo.txt", "**/*.rs", "Cargo.toml", ".dotfile"] - "#, - ) - .file("foo.txt", "") - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .file(".dotfile", "") - // Should be ignored when packaging. - .file("src/bar.txt", "") - .build(); - - cargo_process("package --no-verify -v") - .cwd(repo.root()) - .with_stderr( - "\ -[WARNING] manifest has no description[..] -See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. -[WARNING] both package.include and package.exclude are specified; the exclude list will be ignored -[PACKAGING] foo v0.0.1 ([..]) -[ARCHIVING] .cargo_vcs_info.json -[ARCHIVING] .dotfile -[ARCHIVING] Cargo.lock -[ARCHIVING] Cargo.toml -[ARCHIVING] Cargo.toml.orig -[ARCHIVING] foo.txt -[ARCHIVING] src/main.rs -", - ) - .run(); -} - -#[cargo_test] -fn package_lib_with_bin() { - let p = project() - .file("src/main.rs", "extern crate foo; fn main() {}") - .file("src/lib.rs", "") - .build(); - - p.cargo("package -v").run(); -} - -#[cargo_test] -fn package_git_submodule() { - let project = git::new("foo", |project| { - project - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = ["foo@example.com"] - license = "MIT" - description = "foo" - repository = "foo" - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - }); - let library = git::new("bar", |library| { - library.no_manifest().file("Makefile", "all:") - }); - - let repository = git2::Repository::open(&project.root()).unwrap(); - let url = path2url(library.root()).to_string(); - git::add_submodule(&repository, &url, Path::new("bar")); - git::commit(&repository); - - let repository = git2::Repository::open(&project.root().join("bar")).unwrap(); - repository - .reset( - &repository.revparse_single("HEAD").unwrap(), - git2::ResetType::Hard, - None, - ) - .unwrap(); - - project - .cargo("package --no-verify -v") - .with_stderr_contains("[ARCHIVING] bar/Makefile") - .run(); -} - -#[cargo_test] -/// Tests if a symlink to a git submodule is properly handled. -/// -/// This test requires you to be able to make symlinks. -/// For windows, this may require you to enable developer mode. 
-fn package_symlink_to_submodule() { - #[cfg(unix)] - use std::os::unix::fs::symlink; - #[cfg(windows)] - use std::os::windows::fs::symlink_dir as symlink; - - if !symlink_supported() { - return; - } - - let project = git::new("foo", |project| { - project.file("src/lib.rs", "pub fn foo() {}") - }); - - let library = git::new("submodule", |library| { - library.no_manifest().file("Makefile", "all:") - }); - - let repository = git2::Repository::open(&project.root()).unwrap(); - let url = path2url(library.root()).to_string(); - git::add_submodule(&repository, &url, Path::new("submodule")); - t!(symlink( - &project.root().join("submodule"), - &project.root().join("submodule-link") - )); - git::add(&repository); - git::commit(&repository); - - let repository = git2::Repository::open(&project.root().join("submodule")).unwrap(); - repository - .reset( - &repository.revparse_single("HEAD").unwrap(), - git2::ResetType::Hard, - None, - ) - .unwrap(); - - project - .cargo("package --no-verify -v") - .with_stderr_contains("[ARCHIVING] submodule/Makefile") - .run(); -} - -#[cargo_test] -fn no_duplicates_from_modified_tracked_files() { - let p = git::new("all", |p| p.file("src/main.rs", "fn main() {}")); - p.change_file("src/main.rs", r#"fn main() { println!("A change!"); }"#); - p.cargo("build").run(); - p.cargo("package --list --allow-dirty") - .with_stdout( - "\ -Cargo.lock -Cargo.toml -Cargo.toml.orig -src/main.rs -", - ) - .run(); -} - -#[cargo_test] -fn ignore_nested() { - let cargo_toml = r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#; - let main_rs = r#" - fn main() { println!("hello"); } - "#; - let p = project() - .file("Cargo.toml", cargo_toml) - .file("src/main.rs", main_rs) - // If a project happens to contain a copy of itself, we should - // ignore it. - .file("a_dir/foo/Cargo.toml", cargo_toml) - .file("a_dir/foo/src/main.rs", main_rs) - .build(); - - p.cargo("package") - .with_stderr( - "\ -[WARNING] manifest has no documentation[..] -See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. -[PACKAGING] foo v0.0.1 ([CWD]) -[VERIFYING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); - p.cargo("package -l") - .with_stdout( - "\ -Cargo.lock -Cargo.toml -Cargo.toml.orig -src/main.rs -", - ) - .run(); - p.cargo("package").with_stdout("").run(); - - let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); - validate_crate_contents( - f, - "foo-0.0.1.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - &[], - ); -} - -// Windows doesn't allow these characters in filenames. -#[cfg(unix)] -#[cargo_test] -fn package_weird_characters() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .file("src/:foo", "") - .build(); - - p.cargo("package") - .with_status(101) - .with_stderr( - "\ -warning: [..] -See [..] -[ERROR] cannot package a filename with a special character `:`: src/:foo -", - ) - .run(); -} - -#[cargo_test] -fn repackage_on_source_change() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - p.cargo("package").run(); - - // Add another source file - p.change_file("src/foo.rs", r#"fn main() { println!("foo"); }"#); - - // Check that cargo rebuilds the tarball - p.cargo("package") - .with_stderr( - "\ -[WARNING] [..] 
-See [..] -[PACKAGING] foo v0.0.1 ([CWD]) -[VERIFYING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - // Check that the tarball contains the added file - let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); - validate_crate_contents( - f, - "foo-0.0.1.crate", - &[ - "Cargo.lock", - "Cargo.toml", - "Cargo.toml.orig", - "src/main.rs", - "src/foo.rs", - ], - &[], - ); -} - -#[cargo_test] -/// Tests if a broken symlink is properly handled when packaging. -/// -/// This test requires you to be able to make symlinks. -/// For windows, this may require you to enable developer mode. -fn broken_symlink() { - #[cfg(unix)] - use std::os::unix::fs::symlink; - #[cfg(windows)] - use std::os::windows::fs::symlink_dir as symlink; - - if !symlink_supported() { - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = 'foo' - documentation = 'foo' - homepage = 'foo' - repository = 'foo' - "#, - ) - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - t!(symlink("nowhere", &p.root().join("src/foo.rs"))); - - p.cargo("package -v") - .with_status(101) - .with_stderr_contains( - "\ -[ERROR] failed to prepare local package for uploading - -Caused by: - failed to open for archiving: `[..]foo.rs` - -Caused by: - [..] -", - ) - .run(); -} - -#[cargo_test] -/// Tests if a symlink to a directory is properly included. -/// -/// This test requires you to be able to make symlinks. -/// For windows, this may require you to enable developer mode. -fn package_symlink_to_dir() { - if !symlink_supported() { - return; - } - - project() - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .file("bla/Makefile", "all:") - .symlink_dir("bla", "foo") - .build() - .cargo("package -v") - .with_stderr_contains("[ARCHIVING] foo/Makefile") - .run(); -} - -#[cargo_test] -/// Tests if a symlink to ancestor causes filesystem loop error. -/// -/// This test requires you to be able to make symlinks. -/// For windows, this may require you to enable developer mode. -fn filesystem_loop() { - if !symlink_supported() { - return; - } - - project() - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .symlink_dir("a/b", "a/b/c/d/foo") - .build() - .cargo("package -v") - .with_stderr_contains( - "[WARNING] File system loop found: [..]/a/b/c/d/foo points to an ancestor [..]/a/b", - ) - .run(); -} - -#[cargo_test] -fn do_not_package_if_repository_is_dirty() { - let p = project().build(); - - // Create a Git repository containing a minimal Rust project. - let _ = git::repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - // Modify Cargo.toml without committing the change. 
- p.change_file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - # change - "#, - ); - - p.cargo("package") - .with_status(101) - .with_stderr( - "\ -error: 1 files in the working directory contain changes that were not yet \ -committed into git: - -Cargo.toml - -to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag -", - ) - .run(); -} - -#[cargo_test] -fn dirty_ignored() { - // Cargo warns about an ignored file that will be published. - let (p, repo) = git::new_repo("foo", |p| { - p.file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - description = "foo" - license = "foo" - documentation = "foo" - include = ["src", "build"] - "#, - ) - .file("src/lib.rs", "") - .file(".gitignore", "build") - }); - // Example of adding a file that is confusingly ignored by an overzealous - // gitignore rule. - p.change_file("src/build/mod.rs", ""); - p.cargo("package --list") - .with_status(101) - .with_stderr( - "\ -error: 1 files in the working directory contain changes that were not yet committed into git: - -src/build/mod.rs - -to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag -", - ) - .run(); - // Add the ignored file and make sure it is included. - let mut index = t!(repo.index()); - t!(index.add_path(Path::new("src/build/mod.rs"))); - t!(index.write()); - git::commit(&repo); - p.cargo("package --list") - .with_stderr("") - .with_stdout( - "\ -.cargo_vcs_info.json -Cargo.toml -Cargo.toml.orig -src/build/mod.rs -src/lib.rs -", - ) - .run(); -} - -#[cargo_test] -fn generated_manifest() { - registry::alt_init(); - Package::new("abc", "1.0.0").publish(); - Package::new("def", "1.0.0").alternative(true).publish(); - Package::new("ghi", "1.0.0").publish(); - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - exclude = ["*.txt"] - license = "MIT" - description = "foo" - - [project.metadata] - foo = 'bar' - - [workspace] - - [dependencies] - bar = { path = "bar", version = "0.1" } - def = { version = "1.0", registry = "alternative" } - ghi = "1.0" - abc = "1.0" - "#, - ) - .file("src/main.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("package --no-verify").run(); - - let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); - let rewritten_toml = format!( - r#"{} -[package] -name = "foo" -version = "0.0.1" -authors = [] -exclude = ["*.txt"] -description = "foo" -license = "MIT" - -[package.metadata] -foo = "bar" -[dependencies.abc] -version = "1.0" - -[dependencies.bar] -version = "0.1" - -[dependencies.def] -version = "1.0" -registry-index = "{}" - -[dependencies.ghi] -version = "1.0" -"#, - cargo::core::package::MANIFEST_PREAMBLE, - registry::alt_registry_url() - ); - - validate_crate_contents( - f, - "foo-0.0.1.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - &[("Cargo.toml", &rewritten_toml)], - ); -} - -#[cargo_test] -fn ignore_workspace_specifier() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - - authors = [] - - [workspace] - - [dependencies] - bar = { path = "bar", version = "0.1" } - "#, - ) - .file("src/main.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = 
"0.1.0" - authors = [] - workspace = ".." - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("package --no-verify").cwd("bar").run(); - - let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap(); - let rewritten_toml = format!( - r#"{} -[package] -name = "bar" -version = "0.1.0" -authors = [] -"#, - cargo::core::package::MANIFEST_PREAMBLE - ); - validate_crate_contents( - f, - "bar-0.1.0.crate", - &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], - &[("Cargo.toml", &rewritten_toml)], - ); -} - -#[cargo_test] -fn package_two_kinds_of_deps() { - Package::new("other", "1.0.0").publish(); - Package::new("other1", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - other = "1.0" - other1 = { version = "1.0" } - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("package --no-verify").run(); -} - -#[cargo_test] -fn test_edition() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["edition"] - [package] - name = "foo" - version = "0.0.1" - authors = [] - edition = "2018" - "#, - ) - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("build -v") - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..]--edition=2018 [..] -", - ) - .run(); -} - -#[cargo_test] -fn edition_with_metadata() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - edition = "2018" - - [package.metadata.docs.rs] - features = ["foobar"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("package").run(); -} - -#[cargo_test] -fn test_edition_malformed() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - edition = "chicken" - "#, - ) - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - failed to parse the `edition` key - -Caused by: - supported edition values are `2015`, `2018`, or `2021`, but `chicken` is unknown -" - .to_string(), - ) - .run(); -} - -#[cargo_test] -fn test_edition_from_the_future() { - let p = project() - .file( - "Cargo.toml", - r#"[package] - edition = "2038" - name = "foo" - version = "99.99.99" - authors = [] - "#, - ) - .file("src/main.rs", r#""#) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - failed to parse the `edition` key - -Caused by: - this version of Cargo is older than the `2038` edition, and only supports `2015`, `2018`, and `2021` editions. -" - .to_string(), - ) - .run(); -} - -#[cargo_test] -fn do_not_package_if_src_was_modified() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .file("dir/foo.txt", "") - .file("bar.txt", "") - .file( - "build.rs", - r#" - use std::fs; - - fn main() { - fs::write("src/generated.txt", - "Hello, world of generated files." 
- ).expect("failed to create file"); - fs::remove_file("dir/foo.txt").expect("failed to remove file"); - fs::remove_dir("dir").expect("failed to remove dir"); - fs::write("bar.txt", "updated content").expect("failed to update"); - fs::create_dir("new-dir").expect("failed to create dir"); - } - "#, - ) - .build(); - - p.cargo("package") - .with_status(101) - .with_stderr_contains( - "\ -error: failed to verify package tarball - -Caused by: - Source directory was modified by build.rs during cargo publish. \ - Build scripts should not modify anything outside of OUT_DIR. - Changed: [CWD]/target/package/foo-0.0.1/bar.txt - Added: [CWD]/target/package/foo-0.0.1/new-dir - [CWD]/target/package/foo-0.0.1/src/generated.txt - Removed: [CWD]/target/package/foo-0.0.1/dir - [CWD]/target/package/foo-0.0.1/dir/foo.txt - - To proceed despite this, pass the `--no-verify` flag.", - ) - .run(); - - p.cargo("package --no-verify").run(); -} - -#[cargo_test] -fn package_with_select_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [features] - required = [] - optional = [] - "#, - ) - .file( - "src/main.rs", - "#[cfg(not(feature = \"required\"))] - compile_error!(\"This crate requires `required` feature!\"); - fn main() {}", - ) - .build(); - - p.cargo("package --features required").run(); -} - -#[cargo_test] -fn package_with_all_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [features] - required = [] - optional = [] - "#, - ) - .file( - "src/main.rs", - "#[cfg(not(feature = \"required\"))] - compile_error!(\"This crate requires `required` feature!\"); - fn main() {}", - ) - .build(); - - p.cargo("package --all-features").run(); -} - -#[cargo_test] -fn package_no_default_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [features] - default = ["required"] - required = [] - "#, - ) - .file( - "src/main.rs", - "#[cfg(not(feature = \"required\"))] - compile_error!(\"This crate requires `required` feature!\"); - fn main() {}", - ) - .build(); - - p.cargo("package --no-default-features") - .with_stderr_contains("error: This crate requires `required` feature!") - .with_status(101) - .run(); -} - -#[cargo_test] -fn include_cargo_toml_implicit() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - include = ["src/lib.rs"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("package --list") - .with_stdout("Cargo.toml\nCargo.toml.orig\nsrc/lib.rs\n") - .run(); -} - -fn include_exclude_test(include: &str, exclude: &str, files: &[&str], expected: &str) { - let mut pb = project().file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - include = {} - exclude = {} - "#, - include, exclude - ), - ); - for file in files { - pb = pb.file(file, ""); - } - let p = pb.build(); - - p.cargo("package --list") - .with_stderr("") - .with_stdout(expected) - .run(); - p.root().rm_rf(); -} - -#[cargo_test] -fn package_include_ignore_only() { - // Test with a gitignore pattern that fails to parse with glob. 
- // This is a somewhat nonsense pattern, but is an example of something git - // allows and glob does not. - assert!(glob::Pattern::new("src/abc**").is_err()); - - include_exclude_test( - r#"["Cargo.toml", "src/abc**", "src/lib.rs"]"#, - "[]", - &["src/lib.rs", "src/abc1.rs", "src/abc2.rs", "src/abc/mod.rs"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - src/abc/mod.rs\n\ - src/abc1.rs\n\ - src/abc2.rs\n\ - src/lib.rs\n\ - ", - ) -} - -#[cargo_test] -fn gitignore_patterns() { - include_exclude_test( - r#"["Cargo.toml", "foo"]"#, // include - "[]", - &["src/lib.rs", "foo", "a/foo", "a/b/foo", "x/foo/y", "bar"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - a/b/foo\n\ - a/foo\n\ - foo\n\ - x/foo/y\n\ - ", - ); - - include_exclude_test( - r#"["Cargo.toml", "/foo"]"#, // include - "[]", - &["src/lib.rs", "foo", "a/foo", "a/b/foo", "x/foo/y", "bar"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - foo\n\ - ", - ); - - include_exclude_test( - "[]", - r#"["foo/"]"#, // exclude - &["src/lib.rs", "foo", "a/foo", "x/foo/y", "bar"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - a/foo\n\ - bar\n\ - foo\n\ - src/lib.rs\n\ - ", - ); - - include_exclude_test( - "[]", - r#"["*.txt", "[ab]", "[x-z]"]"#, // exclude - &[ - "src/lib.rs", - "foo.txt", - "bar/foo.txt", - "other", - "a", - "b", - "c", - "x", - "y", - "z", - ], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - c\n\ - other\n\ - src/lib.rs\n\ - ", - ); - - include_exclude_test( - r#"["Cargo.toml", "**/foo/bar"]"#, // include - "[]", - &["src/lib.rs", "a/foo/bar", "foo", "bar"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - a/foo/bar\n\ - ", - ); - - include_exclude_test( - r#"["Cargo.toml", "foo/**"]"#, // include - "[]", - &["src/lib.rs", "a/foo/bar", "foo/x/y/z"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - foo/x/y/z\n\ - ", - ); - - include_exclude_test( - r#"["Cargo.toml", "a/**/b"]"#, // include - "[]", - &["src/lib.rs", "a/b", "a/x/b", "a/x/y/b"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - a/b\n\ - a/x/b\n\ - a/x/y/b\n\ - ", - ); -} - -#[cargo_test] -fn gitignore_negate() { - include_exclude_test( - r#"["Cargo.toml", "*.rs", "!foo.rs", "\\!important"]"#, // include - "[]", - &["src/lib.rs", "foo.rs", "!important"], - "!important\n\ - Cargo.toml\n\ - Cargo.toml.orig\n\ - src/lib.rs\n\ - ", - ); - - // NOTE: This is unusual compared to git. Git treats `src/` as a - // short-circuit which means rules like `!src/foo.rs` would never run. - // However, because Cargo only works by iterating over *files*, it doesn't - // short-circuit. 
- include_exclude_test( - r#"["Cargo.toml", "src/", "!src/foo.rs"]"#, // include - "[]", - &["src/lib.rs", "src/foo.rs"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - src/lib.rs\n\ - ", - ); - - include_exclude_test( - r#"["Cargo.toml", "src/*.rs", "!foo.rs"]"#, // include - "[]", - &["src/lib.rs", "foo.rs", "src/foo.rs", "src/bar/foo.rs"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - src/lib.rs\n\ - ", - ); - - include_exclude_test( - "[]", - r#"["*.rs", "!foo.rs", "\\!important"]"#, // exclude - &["src/lib.rs", "foo.rs", "!important"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - foo.rs\n\ - ", - ); -} - -#[cargo_test] -fn exclude_dot_files_and_directories_by_default() { - include_exclude_test( - "[]", - "[]", - &["src/lib.rs", ".dotfile", ".dotdir/file"], - "Cargo.toml\n\ - Cargo.toml.orig\n\ - src/lib.rs\n\ - ", - ); - - include_exclude_test( - r#"["Cargo.toml", "src/lib.rs", ".dotfile", ".dotdir/file"]"#, - "[]", - &["src/lib.rs", ".dotfile", ".dotdir/file"], - ".dotdir/file\n\ - .dotfile\n\ - Cargo.toml\n\ - Cargo.toml.orig\n\ - src/lib.rs\n\ - ", - ); -} - -#[cargo_test] -fn invalid_license_file_path() { - // Test warning when license-file points to a non-existent file. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - license-file = "does-not-exist" - description = "foo" - homepage = "foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("package --no-verify") - .with_stderr( - "\ -[WARNING] license-file `does-not-exist` does not appear to exist (relative to `[..]/foo`). -Please update the license-file setting in the manifest at `[..]/foo/Cargo.toml` -This may become a hard error in the future. -[PACKAGING] foo v1.0.0 ([..]/foo) -", - ) - .run(); -} - -#[cargo_test] -fn license_file_implicit_include() { - // license-file should be automatically included even if not listed. - let p = git::new("foo", |p| { - p.file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - license-file = "subdir/LICENSE" - description = "foo" - homepage = "foo" - include = ["src"] - "#, - ) - .file("src/lib.rs", "") - .file("subdir/LICENSE", "license text") - }); - - p.cargo("package --list") - .with_stdout( - "\ -.cargo_vcs_info.json -Cargo.toml -Cargo.toml.orig -src/lib.rs -subdir/LICENSE -", - ) - .with_stderr("") - .run(); - - p.cargo("package --no-verify -v") - .with_stderr( - "\ -[PACKAGING] foo v1.0.0 [..] -[ARCHIVING] .cargo_vcs_info.json -[ARCHIVING] Cargo.toml -[ARCHIVING] Cargo.toml.orig -[ARCHIVING] src/lib.rs -[ARCHIVING] subdir/LICENSE -", - ) - .run(); - let f = File::open(&p.root().join("target/package/foo-1.0.0.crate")).unwrap(); - validate_crate_contents( - f, - "foo-1.0.0.crate", - &[ - ".cargo_vcs_info.json", - "Cargo.toml", - "Cargo.toml.orig", - "subdir/LICENSE", - "src/lib.rs", - ], - &[("subdir/LICENSE", "license text")], - ); -} - -#[cargo_test] -fn relative_license_included() { - // license-file path outside of package will copy into root. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - license-file = "../LICENSE" - description = "foo" - homepage = "foo" - "#, - ) - .file("src/lib.rs", "") - .file("../LICENSE", "license text") - .build(); - - p.cargo("package --list") - .with_stdout( - "\ -Cargo.toml -Cargo.toml.orig -LICENSE -src/lib.rs -", - ) - .with_stderr("") - .run(); - - p.cargo("package") - .with_stderr( - "\ -[PACKAGING] foo v1.0.0 [..] -[VERIFYING] foo v1.0.0 [..] -[COMPILING] foo v1.0.0 [..] -[FINISHED] [..] 
-", - ) - .run(); - let f = File::open(&p.root().join("target/package/foo-1.0.0.crate")).unwrap(); - validate_crate_contents( - f, - "foo-1.0.0.crate", - &["Cargo.toml", "Cargo.toml.orig", "LICENSE", "src/lib.rs"], - &[("LICENSE", "license text")], - ); - let manifest = - std::fs::read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml")).unwrap(); - assert!(manifest.contains("license-file = \"LICENSE\"")); - let orig = - std::fs::read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml.orig")).unwrap(); - assert!(orig.contains("license-file = \"../LICENSE\"")); -} - -#[cargo_test] -fn relative_license_include_collision() { - // Can't copy a relative license-file if there is a file with that name already. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - license-file = "../LICENSE" - description = "foo" - homepage = "foo" - "#, - ) - .file("src/lib.rs", "") - .file("../LICENSE", "outer license") - .file("LICENSE", "inner license") - .build(); - - p.cargo("package --list") - .with_stdout( - "\ -Cargo.toml -Cargo.toml.orig -LICENSE -src/lib.rs -", - ) - .with_stderr("[WARNING] license-file `../LICENSE` appears to be [..]") - .run(); - - p.cargo("package") - .with_stderr( - "\ -[WARNING] license-file `../LICENSE` appears to be [..] -[PACKAGING] foo v1.0.0 [..] -[VERIFYING] foo v1.0.0 [..] -[COMPILING] foo v1.0.0 [..] -[FINISHED] [..] -", - ) - .run(); - let f = File::open(&p.root().join("target/package/foo-1.0.0.crate")).unwrap(); - validate_crate_contents( - f, - "foo-1.0.0.crate", - &["Cargo.toml", "Cargo.toml.orig", "LICENSE", "src/lib.rs"], - &[("LICENSE", "inner license")], - ); - let manifest = read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml")).unwrap(); - assert!(manifest.contains("license-file = \"LICENSE\"")); - let orig = read_to_string(p.root().join("target/package/foo-1.0.0/Cargo.toml.orig")).unwrap(); - assert!(orig.contains("license-file = \"../LICENSE\"")); -} - -#[cargo_test] -#[cfg(not(windows))] // Don't want to create invalid files on Windows. -fn package_restricted_windows() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - license = "MIT" - description = "foo" - homepage = "foo" - "#, - ) - .file("src/lib.rs", "pub mod con;\npub mod aux;") - .file("src/con.rs", "pub fn f() {}") - .file("src/aux/mod.rs", "pub fn f() {}") - .build(); - - p.cargo("package") - // use unordered here because the order of the warning is different on each platform. - .with_stderr_unordered( - "\ -[WARNING] file src/aux/mod.rs is a reserved Windows filename, it will not work on Windows platforms -[WARNING] file src/con.rs is a reserved Windows filename, it will not work on Windows platforms -[PACKAGING] foo [..] -[VERIFYING] foo [..] -[COMPILING] foo [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn finds_git_in_parent() { - // Test where `Cargo.toml` is not in the root of the git repo. 
- let repo_path = paths::root().join("repo"); - fs::create_dir(&repo_path).unwrap(); - let p = project() - .at("repo/foo") - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/lib.rs", "") - .build(); - let repo = git::init(&repo_path); - git::add(&repo); - git::commit(&repo); - p.change_file("ignoreme", ""); - p.change_file("ignoreme2", ""); - p.cargo("package --list --allow-dirty") - .with_stdout( - "\ -Cargo.toml -Cargo.toml.orig -ignoreme -ignoreme2 -src/lib.rs -", - ) - .run(); - - p.change_file(".gitignore", "ignoreme"); - p.cargo("package --list --allow-dirty") - .with_stdout( - "\ -.gitignore -Cargo.toml -Cargo.toml.orig -ignoreme2 -src/lib.rs -", - ) - .run(); - - fs::write(repo_path.join(".gitignore"), "ignoreme2").unwrap(); - p.cargo("package --list --allow-dirty") - .with_stdout( - "\ -.gitignore -Cargo.toml -Cargo.toml.orig -src/lib.rs -", - ) - .run(); -} - -#[cargo_test] -#[cfg(windows)] -fn reserved_windows_name() { - Package::new("bar", "1.0.0") - .file("src/lib.rs", "pub mod aux;") - .file("src/aux.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [dependencies] - bar = "1.0.0" - "#, - ) - .file("src/main.rs", "extern crate bar;\nfn main() { }") - .build(); - p.cargo("package") - .with_status(101) - .with_stderr_contains( - "\ -error: failed to verify package tarball - -Caused by: - failed to download replaced source registry `[..]` - -Caused by: - failed to unpack package `[..] `[..]`)` - -Caused by: - failed to unpack entry at `[..]aux.rs` - -Caused by: - `[..]aux.rs` appears to contain a reserved Windows path, it cannot be extracted on Windows - -Caused by: - failed to unpack `[..]aux.rs` - -Caused by: - failed to unpack `[..]aux.rs` into `[..]aux.rs`", - ) - .run(); -} - -#[cargo_test] -fn list_with_path_and_lock() { - // Allow --list even for something that isn't packageable. - - // Init an empty registry because a versionless path dep will search for - // the package on crates.io. - registry::init(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - license = "MIT" - description = "foo" - homepage = "foo" - - [dependencies] - bar = {path="bar"} - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("package --list") - .with_stdout( - "\ -Cargo.lock -Cargo.toml -Cargo.toml.orig -src/main.rs -", - ) - .run(); - - p.cargo("package") - .with_status(101) - .with_stderr( - "\ -[ERROR] all dependencies must have a version specified when packaging. -dependency `bar` does not specify a version -Note: The packaged dependency will use the version from crates.io, -the `path` specification will be removed from the dependency declaration. -", - ) - .run(); -} - -#[cargo_test] -fn long_file_names() { - // Filenames over 100 characters require a GNU extension tarfile. - // See #8453. - - registry::init(); - let long_name = concat!( - "012345678901234567890123456789012345678901234567890123456789", - "012345678901234567890123456789012345678901234567890123456789", - "012345678901234567890123456789012345678901234567890123456789" - ); - if cfg!(windows) { - // Long paths on Windows require a special registry entry that is - // disabled by default (even on Windows 10). 
- // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file - // If the directory where Cargo runs happens to be more than 80 characters - // long, then it will bump into this limit. - // - // First create a directory to account for various paths Cargo will - // be using in the target directory (such as "target/package/foo-0.1.0"). - let test_path = paths::root().join("test-dir-probe-long-path-support"); - test_path.mkdir_p(); - let test_path = test_path.join(long_name); - if let Err(e) = File::create(&test_path) { - // write to stderr directly to avoid output from being captured - // and always display text, even without --nocapture - use std::io::Write; - writeln!( - std::io::stderr(), - "\nSkipping long_file_names test, this OS or filesystem does not \ - appear to support long file paths: {:?}\n{:?}", - e, - test_path - ) - .unwrap(); - return; - } - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - license = "MIT" - description = "foo" - homepage = "foo" - - [dependencies] - "#, - ) - .file(long_name, "something") - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("package").run(); - p.cargo("package --list") - .with_stdout(&format!( - "\ -{} -Cargo.lock -Cargo.toml -Cargo.toml.orig -src/main.rs -", - long_name - )) - .run(); -} - -#[cargo_test] -fn reproducible_output() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - exclude = ["*.txt"] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - p.cargo("package").run(); - assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); - - let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); - let decoder = GzDecoder::new(f); - let mut archive = Archive::new(decoder); - for ent in archive.entries().unwrap() { - let ent = ent.unwrap(); - println!("checking {:?}", ent.path()); - let header = ent.header(); - assert_eq!(header.mode().unwrap(), 0o644); - assert!(header.mtime().unwrap() != 0); - assert_eq!(header.username().unwrap().unwrap(), ""); - assert_eq!(header.groupname().unwrap().unwrap(), ""); - } -} - -#[cargo_test] -fn package_with_resolver_and_metadata() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - resolver = '2' - - [package.metadata.docs.rs] - all-features = true - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("package").run(); -} - -#[cargo_test] -fn deleted_git_working_tree() { - // When deleting a file, but not staged, cargo should ignore the file. 
- let (p, repo) = git::new_repo("foo", |p| { - p.file("src/lib.rs", "").file("src/main.rs", "fn main() {}") - }); - p.root().join("src/lib.rs").rm_rf(); - p.cargo("package --allow-dirty --list") - .with_stdout( - "\ -Cargo.lock -Cargo.toml -Cargo.toml.orig -src/main.rs -", - ) - .run(); - p.cargo("package --allow-dirty").run(); - let mut index = t!(repo.index()); - t!(index.remove(Path::new("src/lib.rs"), 0)); - t!(index.write()); - p.cargo("package --allow-dirty --list") - .with_stdout( - "\ -Cargo.lock -Cargo.toml -Cargo.toml.orig -src/main.rs -", - ) - .run(); - p.cargo("package --allow-dirty").run(); -} - -#[cargo_test] -fn in_workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.0.1" - authors = [] - license = "MIT" - description = "bar" - workspace = ".." - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .build(); - - p.cargo("package --workspace") - .with_stderr( - "\ -[WARNING] manifest has no documentation, [..] -See [..] -[PACKAGING] bar v0.0.1 ([CWD]/bar) -[VERIFYING] bar v0.0.1 ([CWD]/bar) -[COMPILING] bar v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[WARNING] manifest has no documentation, [..] -See [..] -[PACKAGING] foo v0.0.1 ([CWD]) -[VERIFYING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); - assert!(p.root().join("target/package/bar-0.0.1.crate").is_file()); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/package_features.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/package_features.rs deleted file mode 100644 index 31c1867cb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/package_features.rs +++ /dev/null @@ -1,708 +0,0 @@ -//! Tests for feature selection on the command-line. - -use super::features2::switch_to_resolver_2; -use cargo_test_support::registry::{Dependency, Package}; -use cargo_test_support::{basic_manifest, project}; -use std::fmt::Write; - -#[cargo_test] -fn virtual_no_default_features() { - // --no-default-features in root of virtual workspace. - Package::new("dep1", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [dependencies] - dep1 = {version = "1.0", optional = true} - - [features] - default = ["dep1"] - "#, - ) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - - [features] - default = ["f1"] - f1 = [] - "#, - ) - .file( - "b/src/lib.rs", - r#" - #[cfg(feature = "f1")] - compile_error!{"expected f1 off"} - "#, - ) - .build(); - - p.cargo("check --no-default-features") - .with_stderr_unordered( - "\ -[UPDATING] [..] -[CHECKING] a v0.1.0 [..] -[CHECKING] b v0.1.0 [..] -[FINISHED] [..] 
-", - ) - .run(); - - p.cargo("check --features foo") - .with_status(101) - .with_stderr( - "[ERROR] none of the selected packages contains these features: foo, did you mean: f1?", - ) - .run(); - - p.cargo("check --features a/dep1,b/f1,b/f2,f2") - .with_status(101) - .with_stderr("[ERROR] none of the selected packages contains these features: b/f2, f2, did you mean: f1?") - .run(); - - p.cargo("check --features a/dep,b/f1,b/f2,f2") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr("[ERROR] none of the selected packages contains these features: a/dep, b/f2, f2, did you mean: a/dep1, f1?") - .run(); - - p.cargo("check --features a/dep,a/dep1") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr("[ERROR] none of the selected packages contains these features: a/dep, did you mean: b/f1?") - .run(); -} - -#[cargo_test] -fn virtual_typo_member_feature() { - project() - .file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - resolver = "2" - - [features] - deny-warnings = [] - "#, - ) - .file("src/lib.rs", "") - .build() - .cargo("check --features a/deny-warning") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "[ERROR] none of the selected packages contains these features: a/deny-warning, did you mean: a/deny-warnings?", - ) - .run(); -} - -#[cargo_test] -fn virtual_features() { - // --features in root of virtual workspace. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [features] - f1 = [] - "#, - ) - .file( - "a/src/lib.rs", - r#" - #[cfg(not(feature = "f1"))] - compile_error!{"f1 is missing"} - "#, - ) - .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("check --features f1") - .with_stderr_unordered( - "\ -[CHECKING] a [..] -[CHECKING] b [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn virtual_with_specific() { - // -p flags with --features in root of virtual. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [features] - f1 = [] - f2 = [] - "#, - ) - .file( - "a/src/lib.rs", - r#" - #[cfg(not_feature = "f1")] - compile_error!{"f1 is missing"} - #[cfg(not_feature = "f2")] - compile_error!{"f2 is missing"} - "#, - ) - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - - [features] - f2 = [] - f3 = [] - "#, - ) - .file( - "b/src/lib.rs", - r#" - #[cfg(not_feature = "f2")] - compile_error!{"f2 is missing"} - #[cfg(not_feature = "f3")] - compile_error!{"f3 is missing"} - "#, - ) - .build(); - - p.cargo("check -p a -p b --features f1,f2,f3") - .with_stderr_unordered( - "\ -[CHECKING] a [..] -[CHECKING] b [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn other_member_from_current() { - // -p for another member while in the current directory. 
- let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path="bar", features=["f3"] } - - [features] - f1 = ["bar/f4"] - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [features] - f1 = [] - f2 = [] - f3 = [] - f4 = [] - "#, - ) - .file("bar/src/lib.rs", "") - .file( - "bar/src/main.rs", - r#" - fn main() { - if cfg!(feature = "f1") { - print!("f1"); - } - if cfg!(feature = "f2") { - print!("f2"); - } - if cfg!(feature = "f3") { - print!("f3"); - } - if cfg!(feature = "f4") { - print!("f4"); - } - println!(); - } - "#, - ) - .build(); - - // Old behavior. - p.cargo("run -p bar --features f1") - .with_stdout("f3f4") - .run(); - - p.cargo("run -p bar --features f1,f2") - .with_status(101) - .with_stderr("[ERROR] Package `foo[..]` does not have the feature `f2`") - .run(); - - p.cargo("run -p bar --features bar/f1") - .with_stdout("f1f3") - .run(); - - // New behavior. - switch_to_resolver_2(&p); - p.cargo("run -p bar --features f1").with_stdout("f1").run(); - - p.cargo("run -p bar --features f1,f2") - .with_stdout("f1f2") - .run(); - - p.cargo("run -p bar --features bar/f1") - .with_stdout("f1") - .run(); -} - -#[cargo_test] -fn feature_default_resolver() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [features] - test = [] - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - if cfg!(feature = "test") { - println!("feature set"); - } - } - "#, - ) - .build(); - - p.cargo("check --features testt") - .with_status(101) - .with_stderr("[ERROR] Package `a[..]` does not have the feature `testt`") - .run(); - - p.cargo("run --features test") - .with_status(0) - .with_stdout("feature set") - .run(); - - p.cargo("run --features a/test") - .with_status(101) - .with_stderr("[ERROR] package `a[..]` does not have a dependency named `a`") - .run(); -} - -#[cargo_test] -fn virtual_member_slash() { - // member slash feature syntax - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [dependencies] - b = {path="../b", optional=true} - - [features] - default = ["f1"] - f1 = [] - f2 = [] - "#, - ) - .file( - "a/src/lib.rs", - r#" - #[cfg(feature = "f1")] - compile_error!{"f1 is set"} - - #[cfg(feature = "f2")] - compile_error!{"f2 is set"} - - #[cfg(feature = "b")] - compile_error!{"b is set"} - "#, - ) - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - - [features] - bfeat = [] - "#, - ) - .file( - "b/src/lib.rs", - r#" - #[cfg(feature = "bfeat")] - compile_error!{"bfeat is set"} - "#, - ) - .build(); - - p.cargo("check -p a") - .with_status(101) - .with_stderr_contains("[..]f1 is set[..]") - .with_stderr_does_not_contain("[..]f2 is set[..]") - .with_stderr_does_not_contain("[..]b is set[..]") - .run(); - - p.cargo("check -p a --features a/f1") - .with_status(101) - .with_stderr_contains("[..]f1 is set[..]") - .with_stderr_does_not_contain("[..]f2 is set[..]") - .with_stderr_does_not_contain("[..]b is set[..]") - .run(); - - p.cargo("check -p a --features a/f2") - .with_status(101) - .with_stderr_contains("[..]f1 is set[..]") - .with_stderr_contains("[..]f2 is set[..]") - .with_stderr_does_not_contain("[..]b is set[..]") - .run(); - - p.cargo("check -p a --features b/bfeat") - .with_status(101) - 
.with_stderr_contains("[..]bfeat is set[..]") - .run(); - - p.cargo("check -p a --no-default-features").run(); - - p.cargo("check -p a --no-default-features --features b") - .with_status(101) - .with_stderr_contains("[..]b is set[..]") - .run(); -} - -#[cargo_test] -fn non_member() { - // -p for a non-member - Package::new("dep", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - resolver = "2" - - [dependencies] - dep = "1.0" - - [features] - f1 = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -p dep --features f1") - .with_status(101) - .with_stderr( - "[UPDATING][..]\n[ERROR] cannot specify features for packages outside of workspace", - ) - .run(); - - p.cargo("build -p dep --all-features") - .with_status(101) - .with_stderr("[ERROR] cannot specify features for packages outside of workspace") - .run(); - - p.cargo("build -p dep --no-default-features") - .with_status(101) - .with_stderr("[ERROR] cannot specify features for packages outside of workspace") - .run(); - - p.cargo("build -p dep") - .with_stderr( - "\ -[DOWNLOADING] [..] -[DOWNLOADED] [..] -[COMPILING] dep [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn resolver1_member_features() { - // --features member-name/feature-name with resolver="1" - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["member1", "member2"] - "#, - ) - .file( - "member1/Cargo.toml", - r#" - [package] - name = "member1" - version = "0.1.0" - - [features] - m1-feature = [] - "#, - ) - .file( - "member1/src/main.rs", - r#" - fn main() { - if cfg!(feature = "m1-feature") { - println!("m1-feature set"); - } - } - "#, - ) - .file("member2/Cargo.toml", &basic_manifest("member2", "0.1.0")) - .file("member2/src/lib.rs", "") - .build(); - - p.cargo("run -p member1 --features member1/m1-feature") - .cwd("member2") - .with_stdout("m1-feature set") - .run(); - - p.cargo("check -p member1 --features member1/m2-feature") - .cwd("member2") - .with_status(101) - .with_stderr("[ERROR] Package `member1[..]` does not have the feature `m2-feature`") - .run(); -} - -#[cargo_test] -fn non_member_feature() { - // --features for a non-member - Package::new("jazz", "1.0.0").publish(); - Package::new("bar", "1.0.0") - .add_dep(Dependency::new("jazz", "1.0").optional(true)) - .publish(); - let make_toml = |resolver, optional| { - let mut s = String::new(); - write!( - s, - r#" - [package] - name = "foo" - version = "0.1.0" - resolver = "{}" - - [dependencies] - "#, - resolver - ) - .unwrap(); - if optional { - s.push_str(r#"bar = { version = "1.0", optional = true } "#); - } else { - s.push_str(r#"bar = "1.0""#) - } - s.push('\n'); - s - }; - let p = project() - .file("Cargo.toml", &make_toml("1", false)) - .file("src/lib.rs", "") - .build(); - p.cargo("fetch").run(); - ///////////////////////// V1 non-optional - eprintln!("V1 non-optional"); - p.cargo("check -p bar") - .with_stderr( - "\ -[CHECKING] bar v1.0.0 -[FINISHED] [..] -", - ) - .run(); - // TODO: This should not be allowed (future warning?) - p.cargo("check --features bar/jazz") - .with_stderr( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] jazz v1.0.0 [..] -[CHECKING] jazz v1.0.0 -[CHECKING] bar v1.0.0 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - // TODO: This should not be allowed (future warning?) - p.cargo("check -p bar --features bar/jazz -v") - .with_stderr( - "\ -[FRESH] jazz v1.0.0 -[FRESH] bar v1.0.0 -[FINISHED] [..] 
-", - ) - .run(); - - ///////////////////////// V1 optional - eprintln!("V1 optional"); - p.change_file("Cargo.toml", &make_toml("1", true)); - - // This error isn't great, but is probably unlikely to be common in - // practice, so I'm not going to put much effort into improving it. - p.cargo("check -p bar") - .with_status(101) - .with_stderr( - "\ -error: package ID specification `bar` did not match any packages - -Did you mean `foo`? -", - ) - .run(); - - p.cargo("check -p bar --features bar -v") - .with_stderr( - "\ -[FRESH] bar v1.0.0 -[FINISHED] [..] -", - ) - .run(); - - // TODO: This should not be allowed (future warning?) - p.cargo("check -p bar --features bar/jazz -v") - .with_stderr( - "\ -[FRESH] jazz v1.0.0 -[FRESH] bar v1.0.0 -[FINISHED] [..] -", - ) - .run(); - - ///////////////////////// V2 non-optional - eprintln!("V2 non-optional"); - p.change_file("Cargo.toml", &make_toml("2", false)); - // TODO: This should not be allowed (future warning?) - p.cargo("check --features bar/jazz -v") - .with_stderr( - "\ -[FRESH] jazz v1.0.0 -[FRESH] bar v1.0.0 -[FRESH] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - p.cargo("check -p bar -v") - .with_stderr( - "\ -[FRESH] bar v1.0.0 -[FINISHED] [..] -", - ) - .run(); - p.cargo("check -p bar --features bar/jazz") - .with_status(101) - .with_stderr("error: cannot specify features for packages outside of workspace") - .run(); - - ///////////////////////// V2 optional - eprintln!("V2 optional"); - p.change_file("Cargo.toml", &make_toml("2", true)); - p.cargo("check -p bar") - .with_status(101) - .with_stderr( - "\ -error: package ID specification `bar` did not match any packages - -Did you mean `foo`? -", - ) - .run(); - // New --features behavior does not look at cwd. - p.cargo("check -p bar --features bar") - .with_status(101) - .with_stderr("error: cannot specify features for packages outside of workspace") - .run(); - p.cargo("check -p bar --features bar/jazz") - .with_status(101) - .with_stderr("error: cannot specify features for packages outside of workspace") - .run(); - p.cargo("check -p bar --features foo/bar") - .with_status(101) - .with_stderr("error: cannot specify features for packages outside of workspace") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/patch.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/patch.rs deleted file mode 100644 index 28b5fbfe0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/patch.rs +++ /dev/null @@ -1,2543 +0,0 @@ -//! Tests for `[patch]` table source replacement. 
- -use cargo_test_support::git; -use cargo_test_support::paths; -use cargo_test_support::registry::{self, Package}; -use cargo_test_support::{basic_manifest, project}; -use std::fs; - -#[cargo_test] -fn replace() { - Package::new("bar", "0.1.0").publish(); - Package::new("baz", "0.1.0") - .file( - "src/lib.rs", - "extern crate bar; pub fn baz() { bar::bar(); }", - ) - .dep("bar", "0.1.0") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - baz = "0.1.0" - - [patch.crates-io] - bar = { path = "bar" } - "#, - ) - .file( - "src/lib.rs", - " - extern crate bar; - extern crate baz; - pub fn bar() { - bar::bar(); - baz::baz(); - } - ", - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] baz v0.1.0 ([..]) -[COMPILING] bar v0.1.0 ([CWD]/bar) -[COMPILING] baz v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn from_config() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file( - ".cargo/config.toml", - r#" - [patch.crates-io] - bar = { path = 'bar' } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.1 ([..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn from_config_relative() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file( - "../.cargo/config.toml", - r#" - [patch.crates-io] - bar = { path = 'foo/bar' } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.1 ([..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn from_config_precedence() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [patch.crates-io] - bar = { path = 'no-such-path' } - "#, - ) - .file( - ".cargo/config.toml", - r#" - [patch.crates-io] - bar = { path = 'bar' } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.1 ([..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn nonexistent() { - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [patch.crates-io] - bar = { path = "bar" } - "#, - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.0 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn patch_git() { - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = {{ git = '{}' }} - - [patch.'{0}'] - bar = {{ path = "bar" }} - "#, - bar.url() - ), - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] git repository `file://[..]` -[COMPILING] bar v0.1.0 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn patch_to_git() { - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - - [patch.crates-io] - bar = {{ git = '{}' }} - "#, - bar.url() - ), - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] git repository `file://[..]` -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.0 (file://[..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn unused() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [patch.crates-io] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0")) - .file("bar/src/lib.rs", "not rust code") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. -Check that [..] -with the [..] -what is [..] -version. [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 [..] -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build") - .with_stderr( - "\ -[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. -Check that [..] 
-with the [..] -what is [..] -version. [..] -[FINISHED] [..] -", - ) - .run(); - - // unused patch should be in the lock file - let lock = p.read_lockfile(); - let toml: toml::Value = toml::from_str(&lock).unwrap(); - assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1); - assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar")); - assert_eq!( - toml["patch"]["unused"][0]["version"].as_str(), - Some("0.2.0") - ); -} - -#[cargo_test] -fn unused_with_mismatch_source_being_patched() { - registry::alt_init(); - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [patch.alternative] - bar = { path = "bar" } - - [patch.crates-io] - bar = { path = "baz" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0")) - .file("bar/src/lib.rs", "not rust code") - .file("baz/Cargo.toml", &basic_manifest("bar", "0.3.0")) - .file("baz/src/lib.rs", "not rust code") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. -Perhaps you misspell the source URL being patched. -Possible URLs for `[patch.]`: - crates-io -[WARNING] Patch `bar v0.3.0 ([CWD]/baz)` was not used in the crate graph. -Check that [..] -with the [..] -what is [..] -version. [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 [..] -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn prefer_patch_version() { - Package::new("bar", "0.1.2").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [patch.crates-io] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.1 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build") - .with_stderr( - "\ -[FINISHED] [..] -", - ) - .run(); - - // there should be no patch.unused in the toml file - let lock = p.read_lockfile(); - let toml: toml::Value = toml::from_str(&lock).unwrap(); - assert!(toml.get("patch").is_none()); -} - -#[cargo_test] -fn unused_from_config() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file( - ".cargo/config.toml", - r#" - [patch.crates-io] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0")) - .file("bar/src/lib.rs", "not rust code") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. -Check that [..] -with the [..] -what is [..] -version. [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 [..] -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - p.cargo("build") - .with_stderr( - "\ -[WARNING] Patch `bar v0.2.0 ([CWD]/bar)` was not used in the crate graph. -Check that [..] -with the [..] -what is [..] -version. [..] -[FINISHED] [..] -", - ) - .run(); - - // unused patch should be in the lock file - let lock = p.read_lockfile(); - let toml: toml::Value = toml::from_str(&lock).unwrap(); - assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1); - assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar")); - assert_eq!( - toml["patch"]["unused"][0]["version"].as_str(), - Some("0.2.0") - ); -} - -#[cargo_test] -fn unused_git() { - Package::new("bar", "0.1.0").publish(); - - let foo = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.2.0")) - .file("src/lib.rs", "") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - - [patch.crates-io] - bar = {{ git = '{}' }} - "#, - foo.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] git repository `file://[..]` -[UPDATING] `dummy-registry` index -[WARNING] Patch `bar v0.2.0 ([..])` was not used in the crate graph. -Check that [..] -with the [..] -what is [..] -version. [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 [..] -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build") - .with_stderr( - "\ -[WARNING] Patch `bar v0.2.0 ([..])` was not used in the crate graph. -Check that [..] -with the [..] -what is [..] -version. [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn add_patch() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 [..] -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); - - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [patch.crates-io] - bar = { path = 'bar' } - "#, - ); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn add_patch_from_config() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 [..] -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); - - p.change_file( - ".cargo/config.toml", - r#" - [patch.crates-io] - bar = { path = 'bar' } - "#, - ); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn add_ignored_patch() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 [..] -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); - - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [patch.crates-io] - bar = { path = 'bar' } - "#, - ); - - p.cargo("build") - .with_stderr( - "\ -[WARNING] Patch `bar v0.1.1 ([CWD]/bar)` was not used in the crate graph. -Check that [..] -with the [..] -what is [..] -version. [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); - p.cargo("build") - .with_stderr( - "\ -[WARNING] Patch `bar v0.1.1 ([CWD]/bar)` was not used in the crate graph. -Check that [..] -with the [..] -what is [..] -version. [..] -[FINISHED] [..]", - ) - .run(); - - p.cargo("update").run(); - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.1.1 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [..] -", - ) - .run(); -} - -#[cargo_test] -fn add_patch_with_features() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [patch.crates-io] - bar = { path = 'bar', features = ["some_feature"] } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[WARNING] patch for `bar` uses the features mechanism. \ -default-features and features will not take effect because the patch dependency does not support this mechanism -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.0 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build") - .with_stderr( - "\ -[WARNING] patch for `bar` uses the features mechanism. \ -default-features and features will not take effect because the patch dependency does not support this mechanism -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn add_patch_with_setting_default_features() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [patch.crates-io] - bar = { path = 'bar', default-features = false, features = ["none_default_feature"] } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[WARNING] patch for `bar` uses the features mechanism. \ -default-features and features will not take effect because the patch dependency does not support this mechanism -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.0 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - p.cargo("build") - .with_stderr( - "\ -[WARNING] patch for `bar` uses the features mechanism. \ -default-features and features will not take effect because the patch dependency does not support this mechanism -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn no_warn_ws_patch() { - Package::new("c", "0.1.0").publish(); - - // Don't issue an unused patch warning when the patch isn't used when - // partially building a workspace. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b", "c"] - - [patch.crates-io] - c = { path = "c" } - "#, - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - [dependencies] - c = "0.1.0" - "#, - ) - .file("b/src/lib.rs", "") - .file("c/Cargo.toml", &basic_manifest("c", "0.1.0")) - .file("c/src/lib.rs", "") - .build(); - - p.cargo("build -p a") - .with_stderr( - "\ -[UPDATING] [..] -[COMPILING] a [..] -[FINISHED] [..]", - ) - .run(); -} - -#[cargo_test] -fn new_minor() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [patch.crates-io] - bar = { path = 'bar' } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.1 [..] -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn transitive_new_minor() { - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = 'bar' } - - [patch.crates-io] - baz = { path = 'baz' } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - baz = '0.1.0' - "#, - ) - .file("bar/src/lib.rs", r#""#) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.1")) - .file("baz/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] baz v0.1.1 [..] -[COMPILING] bar v0.1.0 [..] -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn new_major() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.2.0" - - [patch.crates-io] - bar = { path = 'bar' } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.2.0")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.2.0 [..] -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - Package::new("bar", "0.2.0").publish(); - p.cargo("update").run(); - p.cargo("build") - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); - - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.2.0" - "#, - ); - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.2.0 [..] -[COMPILING] bar v0.2.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn transitive_new_major() { - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = 'bar' } - - [patch.crates-io] - baz = { path = 'baz' } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - baz = '0.2.0' - "#, - ) - .file("bar/src/lib.rs", r#""#) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.2.0")) - .file("baz/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] baz v0.2.0 [..] -[COMPILING] bar v0.1.0 [..] -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn shared_by_transitive() { - Package::new("baz", "0.1.1").publish(); - - let baz = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("baz", "0.1.2")) - .file("src/lib.rs", "") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = " 0.1.0" - - [dependencies] - bar = {{ path = "bar" }} - baz = "0.1" - - [patch.crates-io] - baz = {{ git = "{}", version = "0.1" }} - "#, - baz.url(), - ), - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [dependencies] - baz = "0.1.1" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] git repository `file://[..]` -[UPDATING] `dummy-registry` index -[COMPILING] baz v0.1.2 [..] -[COMPILING] bar v0.1.0 [..] -[COMPILING] foo v0.1.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn remove_patch() { - Package::new("foo", "0.1.0").publish(); - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - - [patch.crates-io] - foo = { path = 'foo' } - bar = { path = 'bar' } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", r#""#) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", r#""#) - .build(); - - // Generate a lock file where `foo` is unused - p.cargo("build").run(); - let lock_file1 = p.read_lockfile(); - - // Remove `foo` and generate a new lock file form the old one - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - - [patch.crates-io] - bar = { path = 'bar' } - "#, - ); - p.cargo("build").run(); - let lock_file2 = p.read_lockfile(); - - // Remove the lock file and build from scratch - fs::remove_file(p.root().join("Cargo.lock")).unwrap(); - p.cargo("build").run(); - let lock_file3 = p.read_lockfile(); - - assert!(lock_file1.contains("foo")); - assert_eq!(lock_file2, lock_file3); - assert_ne!(lock_file1, lock_file2); -} - -#[cargo_test] -fn non_crates_io() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [patch.some-other-source] - bar = { path = 'bar' } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - [patch] entry `some-other-source` should be a URL or registry name - -Caused by: - invalid url `some-other-source`: relative URL without a base -", - ) - .run(); -} - -#[cargo_test] -fn replace_with_crates_io() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [patch.crates-io] - bar = "0.1" - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", r#""#) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] 
-error: failed to resolve patches for `[..]` - -Caused by: - patch for `bar` in `[..]` points to the same source, but patches must point \ - to different sources -", - ) - .run(); -} - -#[cargo_test] -fn patch_in_virtual() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo"] - - [patch.crates-io] - bar = { path = "bar" } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", r#""#) - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = "0.1" - "#, - ) - .file("foo/src/lib.rs", r#""#) - .build(); - - p.cargo("build").run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn patch_depends_on_another_patch() { - Package::new("bar", "0.1.0") - .file("src/lib.rs", "broken code") - .publish(); - - Package::new("baz", "0.1.0") - .dep("bar", "0.1") - .file("src/lib.rs", "broken code") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.1.0" - - [dependencies] - bar = "0.1" - baz = "0.1" - - [patch.crates-io] - bar = { path = "bar" } - baz = { path = "baz" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("bar/src/lib.rs", r#""#) - .file( - "baz/Cargo.toml", - r#" - [package] - name = "baz" - version = "0.1.1" - authors = [] - - [dependencies] - bar = "0.1" - "#, - ) - .file("baz/src/lib.rs", r#""#) - .build(); - - p.cargo("build").run(); - - // Nothing should be rebuilt, no registry should be updated. - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn replace_prerelease() { - Package::new("baz", "1.1.0-pre.1").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - - [patch.crates-io] - baz = { path = "./baz" } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [dependencies] - baz = "1.1.0-pre.1" - "#, - ) - .file( - "bar/src/main.rs", - "extern crate baz; fn main() { baz::baz() }", - ) - .file( - "baz/Cargo.toml", - r#" - [project] - name = "baz" - version = "1.1.0-pre.1" - authors = [] - [workspace] - "#, - ) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn patch_older() { - Package::new("baz", "1.0.2").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = 'bar' } - baz = "=1.0.1" - - [patch.crates-io] - baz = { path = "./baz" } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [dependencies] - baz = "1.0.0" - "#, - ) - .file("bar/src/lib.rs", "") - .file( - "baz/Cargo.toml", - r#" - [project] - name = "baz" - version = "1.0.1" - authors = [] - "#, - ) - .file("baz/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] [..] -[COMPILING] baz v1.0.1 [..] -[COMPILING] bar v0.5.0 [..] -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn cycle() { - Package::new("a", "1.0.0").publish(); - Package::new("b", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - - [patch.crates-io] - a = {path="a"} - b = {path="b"} - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "1.0.0" - - [dependencies] - b = "1.0" - "#, - ) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "1.0.0" - - [dependencies] - a = "1.0" - "#, - ) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[ERROR] cyclic package dependency: [..] -package `[..]` - ... which satisfies dependency `[..]` of package `[..]` - ... which satisfies dependency `[..]` of package `[..]` -", - ) - .run(); -} - -#[cargo_test] -fn multipatch() { - Package::new("a", "1.0.0").publish(); - Package::new("a", "2.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - a1 = { version = "1", package = "a" } - a2 = { version = "2", package = "a" } - - [patch.crates-io] - b1 = { path = "a1", package = "a" } - b2 = { path = "a2", package = "a" } - "#, - ) - .file("src/lib.rs", "pub fn foo() { a1::f1(); a2::f2(); }") - .file( - "a1/Cargo.toml", - r#" - [package] - name = "a" - version = "1.0.0" - "#, - ) - .file("a1/src/lib.rs", "pub fn f1() {}") - .file( - "a2/Cargo.toml", - r#" - [package] - name = "a" - version = "2.0.0" - "#, - ) - .file("a2/src/lib.rs", "pub fn f2() {}") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn patch_same_version() { - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "") - .build(); - - cargo_test_support::registry::init(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - [dependencies] - bar = "0.1" - [patch.crates-io] - bar = {{ path = "bar" }} - bar2 = {{ git = '{}', package = 'bar' }} - "#, - bar.url(), - ), - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -error: cannot have two `[patch]` entries which both resolve to `bar v0.1.0` -", - ) - .run(); -} - -#[cargo_test] -fn two_semver_compatible() { - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("src/lib.rs", "") - .build(); - - cargo_test_support::registry::init(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - [dependencies] - bar = "0.1" - [patch.crates-io] - bar = {{ path = "bar" }} - bar2 = {{ git = '{}', package = 'bar' }} - "#, - bar.url(), - ), - ) - .file("src/lib.rs", "pub fn foo() { bar::foo() }") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.2" - "#, - ) - .file("bar/src/lib.rs", "pub fn foo() {}") - .build(); - - // assert the build succeeds and doesn't panic anywhere, and then afterwards - // assert that the build succeeds again without updating anything or - // building anything else. - p.cargo("build").run(); - p.cargo("build") - .with_stderr( - "\ -warning: Patch `bar v0.1.1 [..]` was not used in the crate graph. -Perhaps you misspell the source URL being patched. 
-Possible URLs for `[patch.]`: - [CWD]/bar -[FINISHED] [..]", - ) - .run(); -} - -#[cargo_test] -fn multipatch_select_big() { - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "") - .build(); - - cargo_test_support::registry::init(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - [dependencies] - bar = "*" - [patch.crates-io] - bar = {{ path = "bar" }} - bar2 = {{ git = '{}', package = 'bar' }} - "#, - bar.url(), - ), - ) - .file("src/lib.rs", "pub fn foo() { bar::foo() }") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.2.0" - "#, - ) - .file("bar/src/lib.rs", "pub fn foo() {}") - .build(); - - // assert the build succeeds, which is only possible if 0.2.0 is selected - // since 0.1.0 is missing the function we need. Afterwards assert that the - // build succeeds again without updating anything or building anything else. - p.cargo("build").run(); - p.cargo("build") - .with_stderr( - "\ -warning: Patch `bar v0.1.0 [..]` was not used in the crate graph. -Perhaps you misspell the source URL being patched. -Possible URLs for `[patch.]`: - [CWD]/bar -[FINISHED] [..]", - ) - .run(); -} - -#[cargo_test] -fn canonicalize_a_bunch() { - let base = git::repo(&paths::root().join("base")) - .file("Cargo.toml", &basic_manifest("base", "0.1.0")) - .file("src/lib.rs", "") - .build(); - - let intermediate = git::repo(&paths::root().join("intermediate")) - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "intermediate" - version = "0.1.0" - - [dependencies] - # Note the lack of trailing slash - base = {{ git = '{}' }} - "#, - base.url(), - ), - ) - .file("src/lib.rs", "pub fn f() { base::f() }") - .build(); - - let newbase = git::repo(&paths::root().join("newbase")) - .file("Cargo.toml", &basic_manifest("base", "0.1.0")) - .file("src/lib.rs", "pub fn f() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - # Note the trailing slashes - base = {{ git = '{base}/' }} - intermediate = {{ git = '{intermediate}/' }} - - [patch.'{base}'] # Note the lack of trailing slash - base = {{ git = '{newbase}' }} - "#, - base = base.url(), - intermediate = intermediate.url(), - newbase = newbase.url(), - ), - ) - .file("src/lib.rs", "pub fn a() { base::f(); intermediate::f() }") - .build(); - - // Once to make sure it actually works - p.cargo("build").run(); - - // Then a few more times for good measure to ensure no weird warnings about - // `[patch]` are printed. - p.cargo("build").with_stderr("[FINISHED] [..]").run(); - p.cargo("build").with_stderr("[FINISHED] [..]").run(); -} - -#[cargo_test] -fn update_unused_new_version() { - // If there is an unused patch entry, and then you update the patch, - // make sure `cargo update` will be able to fix the lock file. - Package::new("bar", "0.1.5").publish(); - - // Start with a lock file to 0.1.5, and an "unused" patch because the - // version is too old. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = "0.1.5" - - [patch.crates-io] - bar = { path = "../bar" } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Patch is too old. 
- let bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.4")) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr_contains("[WARNING] Patch `bar v0.1.4 [..] was not used in the crate graph.") - .run(); - // unused patch should be in the lock file - let lock = p.read_lockfile(); - let toml: toml::Value = toml::from_str(&lock).unwrap(); - assert_eq!(toml["patch"]["unused"].as_array().unwrap().len(), 1); - assert_eq!(toml["patch"]["unused"][0]["name"].as_str(), Some("bar")); - assert_eq!( - toml["patch"]["unused"][0]["version"].as_str(), - Some("0.1.4") - ); - - // Oh, OK, let's update to the latest version. - bar.change_file("Cargo.toml", &basic_manifest("bar", "0.1.6")); - - // Create a backup so we can test it with different options. - fs::copy(p.root().join("Cargo.lock"), p.root().join("Cargo.lock.bak")).unwrap(); - - // Try to build again, this should automatically update Cargo.lock. - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.6 ([..]/bar) -[COMPILING] foo v0.0.1 ([..]/foo) -[FINISHED] [..] -", - ) - .run(); - // This should not update any registry. - p.cargo("build").with_stderr("[FINISHED] [..]").run(); - assert!(!p.read_lockfile().contains("unused")); - - // Restore the lock file, and see if `update` will work, too. - fs::copy(p.root().join("Cargo.lock.bak"), p.root().join("Cargo.lock")).unwrap(); - - // Try `update -p`. - p.cargo("update -p bar") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[ADDING] bar v0.1.6 ([..]/bar) -[REMOVING] bar v0.1.5 -", - ) - .run(); - - // Try with bare `cargo update`. - fs::copy(p.root().join("Cargo.lock.bak"), p.root().join("Cargo.lock")).unwrap(); - p.cargo("update") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[ADDING] bar v0.1.6 ([..]/bar) -[REMOVING] bar v0.1.5 -", - ) - .run(); -} - -#[cargo_test] -fn too_many_matches() { - // The patch locations has multiple versions that match. - registry::alt_init(); - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.1.0").alternative(true).publish(); - Package::new("bar", "0.1.1").alternative(true).publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - - [patch.crates-io] - bar = { version = "0.1", registry = "alternative" } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Picks 0.1.1, the most recent version. - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -[UPDATING] `alternative` index -[ERROR] failed to resolve patches for `https://github.com/rust-lang/crates.io-index` - -Caused by: - patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve - -Caused by: - patch for `bar` in `registry `alternative`` resolved to more than one candidate - Found versions: 0.1.0, 0.1.1 - Update the patch definition to select only one package. - For example, add an `=` version requirement to the patch definition, such as `version = \"=0.1.1\"`. -", - ) - .run(); -} - -#[cargo_test] -fn no_matches() { - // A patch to a location that does not contain the named package. 
- let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - - [patch.crates-io] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("abc", "0.1.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -error: failed to resolve patches for `https://github.com/rust-lang/crates.io-index` - -Caused by: - patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve - -Caused by: - The patch location `[..]/foo/bar` does not appear to contain any packages matching the name `bar`. -", - ) - .run(); -} - -#[cargo_test] -fn mismatched_version() { - // A patch to a location that has an old version. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1.1" - - [patch.crates-io] - bar = { path = "bar", version = "0.1.1" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to resolve patches for `https://github.com/rust-lang/crates.io-index` - -Caused by: - patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve - -Caused by: - The patch location `[..]/foo/bar` contains a `bar` package with version `0.1.0`, \ - but the patch definition requires `^0.1.1`. - Check that the version in the patch location is what you expect, \ - and update the patch definition to match. -", - ) - .run(); -} - -#[cargo_test] -fn patch_walks_backwards() { - // Starting with a locked patch, change the patch so it points to an older version. - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - - [patch.crates-io] - bar = {path="bar"} - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("check") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[CHECKING] bar v0.1.1 ([..]/foo/bar) -[CHECKING] foo v0.1.0 ([..]/foo) -[FINISHED] [..] -", - ) - .run(); - - // Somehow the user changes the version backwards. - p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")); - - p.cargo("check") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[CHECKING] bar v0.1.0 ([..]/foo/bar) -[CHECKING] foo v0.1.0 ([..]/foo) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn patch_walks_backwards_restricted() { - // This is the same as `patch_walks_backwards`, but the patch contains a - // `version` qualifier. This is unusual, just checking a strange edge case. - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - - [patch.crates-io] - bar = {path="bar", version="0.1.1"} - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("check") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[CHECKING] bar v0.1.1 ([..]/foo/bar) -[CHECKING] foo v0.1.0 ([..]/foo) -[FINISHED] [..] -", - ) - .run(); - - // Somehow the user changes the version backwards. 
- p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")); - - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -error: failed to resolve patches for `https://github.com/rust-lang/crates.io-index` - -Caused by: - patch for `bar` in `https://github.com/rust-lang/crates.io-index` failed to resolve - -Caused by: - The patch location `[..]/foo/bar` contains a `bar` package with version `0.1.0`, but the patch definition requires `^0.1.1`. - Check that the version in the patch location is what you expect, and update the patch definition to match. -", - ) - .run(); -} - -#[cargo_test] -fn patched_dep_new_version() { - // What happens when a patch is locked, and then one of the patched - // dependencies needs to be updated. In this case, the baz requirement - // gets updated from 0.1.0 to 0.1.1. - Package::new("bar", "0.1.0").dep("baz", "0.1.0").publish(); - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - - [patch.crates-io] - bar = {path="bar"} - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [dependencies] - baz = "0.1" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - // Lock everything. - p.cargo("check") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] baz v0.1.0 [..] -[CHECKING] baz v0.1.0 -[CHECKING] bar v0.1.0 ([..]/foo/bar) -[CHECKING] foo v0.1.0 ([..]/foo) -[FINISHED] [..] -", - ) - .run(); - - Package::new("baz", "0.1.1").publish(); - - // Just the presence of the new version should not have changed anything. - p.cargo("check").with_stderr("[FINISHED] [..]").run(); - - // Modify the patch so it requires the new version. - p.change_file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [dependencies] - baz = "0.1.1" - "#, - ); - - // Should unlock and update cleanly. - p.cargo("check") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] baz v0.1.1 (registry `dummy-registry`) -[CHECKING] baz v0.1.1 -[CHECKING] bar v0.1.0 ([..]/foo/bar) -[CHECKING] foo v0.1.0 ([..]/foo) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn patch_update_doesnt_update_other_sources() { - // Very extreme edge case, make sure a patch update doesn't update other - // sources. - registry::alt_init(); - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.1.0").alternative(true).publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - bar_alt = { version = "0.1", registry = "alternative", package = "bar" } - - [patch.crates-io] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("check") - .with_stderr_unordered( - "\ -[UPDATING] `dummy-registry` index -[UPDATING] `alternative` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 (registry `alternative`) -[CHECKING] bar v0.1.0 (registry `alternative`) -[CHECKING] bar v0.1.0 ([..]/foo/bar) -[CHECKING] foo v0.1.0 ([..]/foo) -[FINISHED] [..] -", - ) - .run(); - - // Publish new versions in both sources. - Package::new("bar", "0.1.1").publish(); - Package::new("bar", "0.1.1").alternative(true).publish(); - - // Since it is locked, nothing should change. 
- p.cargo("check").with_stderr("[FINISHED] [..]").run(); - - // Require new version on crates.io. - p.change_file("bar/Cargo.toml", &basic_manifest("bar", "0.1.1")); - - // This should not update bar_alt. - p.cargo("check") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[CHECKING] bar v0.1.1 ([..]/foo/bar) -[CHECKING] foo v0.1.0 ([..]/foo) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn can_update_with_alt_reg() { - // A patch to an alt reg can update. - registry::alt_init(); - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.1.0").alternative(true).publish(); - Package::new("bar", "0.1.1").alternative(true).publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - - [patch.crates-io] - bar = { version = "=0.1.1", registry = "alternative" } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_stderr( - "\ -[UPDATING] `alternative` index -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.1 (registry `alternative`) -[CHECKING] bar v0.1.1 (registry `alternative`) -[CHECKING] foo v0.1.0 ([..]/foo) -[FINISHED] [..] -", - ) - .run(); - - Package::new("bar", "0.1.2").alternative(true).publish(); - - // Should remain locked. - p.cargo("check").with_stderr("[FINISHED] [..]").run(); - - // This does nothing, due to `=` requirement. - p.cargo("update -p bar") - .with_stderr( - "\ -[UPDATING] `alternative` index -[UPDATING] `dummy-registry` index -", - ) - .run(); - - // Bump to 0.1.2. - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - - [patch.crates-io] - bar = { version = "=0.1.2", registry = "alternative" } - "#, - ); - - p.cargo("check") - .with_stderr( - "\ -[UPDATING] `alternative` index -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.2 (registry `alternative`) -[CHECKING] bar v0.1.2 (registry `alternative`) -[CHECKING] foo v0.1.0 ([..]/foo) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn old_git_patch() { - // Example where an old lockfile with an explicit branch="master" in Cargo.toml. - Package::new("bar", "1.0.0").publish(); - let (bar, bar_repo) = git::new_repo("bar", |p| { - p.file("Cargo.toml", &basic_manifest("bar", "1.0.0")) - .file("src/lib.rs", "") - }); - - let bar_oid = bar_repo.head().unwrap().target().unwrap(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - - [patch.crates-io] - bar = {{ git = "{}", branch = "master" }} - "#, - bar.url() - ), - ) - .file( - "Cargo.lock", - &format!( - r#" -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -[[package]] -name = "bar" -version = "1.0.0" -source = "git+{}#{}" - -[[package]] -name = "foo" -version = "0.1.0" -dependencies = [ - "bar", -] - "#, - bar.url(), - bar_oid - ), - ) - .file("src/lib.rs", "") - .build(); - - bar.change_file("Cargo.toml", &basic_manifest("bar", "2.0.0")); - git::add(&bar_repo); - git::commit(&bar_repo); - - // This *should* keep the old lock. - p.cargo("tree") - // .env("CARGO_LOG", "trace") - .with_stderr( - "\ -[UPDATING] [..] -", - ) - // .with_status(1) - .with_stdout(format!( - "\ -foo v0.1.0 [..] 
-โ””โ”€โ”€ bar v1.0.0 (file:///[..]branch=master#{}) -", - &bar_oid.to_string()[..8] - )) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/path.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/path.rs deleted file mode 100644 index 9bcd220ef..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/path.rs +++ /dev/null @@ -1,1138 +0,0 @@ -//! Tests for `path` dependencies. - -use cargo_test_support::paths::{self, CargoPathExt}; -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_lib_manifest, basic_manifest, main_file, project}; -use cargo_test_support::{sleep_ms, t}; -use std::fs; - -#[cargo_test] -// I have no idea why this is failing spuriously on Windows; -// for more info, see #3466. -#[cfg(not(windows))] -fn cargo_compile_with_nested_deps_shorthand() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - path = "bar" - "#, - ) - .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file( - "bar/Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.baz] - - version = "0.5.0" - path = "baz" - - [lib] - - name = "bar" - "#, - ) - .file( - "bar/src/bar.rs", - r#" - extern crate baz; - - pub fn gimme() -> String { - baz::gimme() - } - "#, - ) - .file("bar/baz/Cargo.toml", &basic_lib_manifest("baz")) - .file( - "bar/baz/src/baz.rs", - r#" - pub fn gimme() -> String { - "test passed".to_string() - } - "#, - ) - .build(); - - p.cargo("build") - .with_stderr( - "[COMPILING] baz v0.5.0 ([CWD]/bar/baz)\n\ - [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); - - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("test passed\n").run(); - - println!("cleaning"); - p.cargo("clean -v").with_stdout("").run(); - println!("building baz"); - p.cargo("build -p baz") - .with_stderr( - "[COMPILING] baz v0.5.0 ([CWD]/bar/baz)\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); - println!("building foo"); - p.cargo("build -p foo") - .with_stderr( - "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); -} - -#[cargo_test] -fn cargo_compile_with_root_dev_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dev-dependencies.bar] - - version = "0.5.0" - path = "../bar" - - [[bin]] - name = "foo" - "#, - ) - .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .build(); - let _p2 = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file( - "src/lib.rs", - r#" - pub fn gimme() -> &'static str { - "zoidberg" - } - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..]can't find crate for `bar`") - .run(); -} - -#[cargo_test] -fn cargo_compile_with_root_dev_deps_with_testing() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dev-dependencies.bar] - - version = "0.5.0" - path = "../bar" - - [[bin]] - name = "foo" - "#, - ) - .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, 
&["bar"])) - .build(); - let _p2 = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file( - "src/lib.rs", - r#" - pub fn gimme() -> &'static str { - "zoidberg" - } - "#, - ) - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] [..] v0.5.0 ([..]) -[COMPILING] [..] v0.5.0 ([..]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("running 0 tests") - .run(); -} - -#[cargo_test] -fn cargo_compile_with_transitive_dev_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - path = "bar" - "#, - ) - .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file( - "bar/Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dev-dependencies.baz] - - git = "git://example.com/path/to/nowhere" - - [lib] - - name = "bar" - "#, - ) - .file( - "bar/src/bar.rs", - r#" - pub fn gimme() -> &'static str { - "zoidberg" - } - "#, - ) - .build(); - - p.cargo("build") - .with_stderr( - "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in \ - [..]\n", - ) - .run(); - - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("zoidberg\n").run(); -} - -#[cargo_test] -fn no_rebuild_dependency() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }") - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/bar.rs", "pub fn bar() {}") - .build(); - // First time around we should compile both foo and bar - p.cargo("build") - .with_stderr( - "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); - - sleep_ms(1000); - p.change_file( - "src/main.rs", - r#" - extern crate bar; - fn main() { bar::bar(); } - "#, - ); - // Don't compile bar, but do recompile foo. 
- p.cargo("build") - .with_stderr( - "[COMPILING] foo v0.5.0 ([..])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); -} - -#[cargo_test] -fn deep_dependencies_trigger_rebuild() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }") - .file( - "bar/Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - name = "bar" - [dependencies.baz] - path = "../baz" - "#, - ) - .file( - "bar/src/bar.rs", - "extern crate baz; pub fn bar() { baz::baz() }", - ) - .file("baz/Cargo.toml", &basic_lib_manifest("baz")) - .file("baz/src/baz.rs", "pub fn baz() {}") - .build(); - p.cargo("build") - .with_stderr( - "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\ - [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); - p.cargo("build").with_stdout("").run(); - - // Make sure an update to baz triggers a rebuild of bar - // - // We base recompilation off mtime, so sleep for at least a second to ensure - // that this write will change the mtime. - sleep_ms(1000); - p.change_file("baz/src/baz.rs", r#"pub fn baz() { println!("hello!"); }"#); - sleep_ms(1000); - p.cargo("build") - .with_stderr( - "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\ - [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); - - // Make sure an update to bar doesn't trigger baz - sleep_ms(1000); - p.change_file( - "bar/src/bar.rs", - r#" - extern crate baz; - pub fn bar() { println!("hello!"); baz::baz(); } - "#, - ); - sleep_ms(1000); - p.cargo("build") - .with_stderr( - "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); -} - -#[cargo_test] -fn no_rebuild_two_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = "bar" - [dependencies.baz] - path = "baz" - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() { bar::bar() }") - .file( - "bar/Cargo.toml", - r#" - [project] - - name = "bar" - version = "0.5.0" - authors = ["wycats@example.com"] - - [lib] - name = "bar" - [dependencies.baz] - path = "../baz" - "#, - ) - .file("bar/src/bar.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_lib_manifest("baz")) - .file("baz/src/baz.rs", "pub fn baz() {}") - .build(); - p.cargo("build") - .with_stderr( - "[COMPILING] baz v0.5.0 ([CWD]/baz)\n\ - [COMPILING] bar v0.5.0 ([CWD]/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); - assert!(p.bin("foo").is_file()); - p.cargo("build").with_stdout("").run(); - assert!(p.bin("foo").is_file()); -} - -#[cargo_test] -fn nested_deps_recompile() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - path = "src/bar" - "#, - ) - .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file("src/bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("src/bar/src/bar.rs", "pub fn gimme() -> i32 { 
92 }") - .build(); - - p.cargo("build") - .with_stderr( - "[COMPILING] bar v0.5.0 ([CWD]/src/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); - sleep_ms(1000); - - p.change_file("src/main.rs", r#"fn main() {}"#); - - // This shouldn't recompile `bar` - p.cargo("build") - .with_stderr( - "[COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); -} - -#[cargo_test] -fn error_message_for_missing_manifest() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - path = "src/bar" - "#, - ) - .file("src/lib.rs", "") - .file("src/bar/not-a-manifest", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to get `bar` as a dependency of package `foo v0.5.0 [..]` - -Caused by: - failed to load source for dependency `bar` - -Caused by: - Unable to update [CWD]/src/bar - -Caused by: - failed to read `[..]bar/Cargo.toml` - -Caused by: - [..] (os error [..]) -", - ) - .run(); -} - -#[cargo_test] -fn override_relative() { - let bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("src/lib.rs", "") - .build(); - - fs::create_dir(&paths::root().join(".cargo")).unwrap(); - fs::write(&paths::root().join(".cargo/config"), r#"paths = ["bar"]"#).unwrap(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = '{}' - "#, - bar.root().display() - ), - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build -v").run(); -} - -#[cargo_test] -fn override_self() { - let bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("src/lib.rs", "") - .build(); - - let p = project(); - let root = p.root(); - let p = p - .file(".cargo/config", &format!("paths = ['{}']", root.display())) - .file( - "Cargo.toml", - &format!( - r#" - [package] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - path = '{}' - - "#, - bar.root().display() - ), - ) - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn override_path_dep() { - let bar = project() - .at("bar") - .file( - "p1/Cargo.toml", - r#" - [package] - name = "p1" - version = "0.5.0" - authors = [] - - [dependencies.p2] - path = "../p2" - "#, - ) - .file("p1/src/lib.rs", "") - .file("p2/Cargo.toml", &basic_manifest("p2", "0.5.0")) - .file("p2/src/lib.rs", "") - .build(); - - let p = project() - .file( - ".cargo/config", - &format!( - "paths = ['{}', '{}']", - bar.root().join("p1").display(), - bar.root().join("p2").display() - ), - ) - .file( - "Cargo.toml", - &format!( - r#" - [package] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.p2] - path = '{}' - - "#, - bar.root().join("p2").display() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn path_dep_build_cmd() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - - name = "foo" - version = "0.5.0" - authors = ["wycats@example.com"] - - [dependencies.bar] - - version = "0.5.0" - path = "bar" - "#, - ) - .file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"])) - .file( - "bar/Cargo.toml", - r#" - [project] - - name = 
"bar" - version = "0.5.0" - authors = ["wycats@example.com"] - build = "build.rs" - - [lib] - name = "bar" - path = "src/bar.rs" - "#, - ) - .file( - "bar/build.rs", - r#" - use std::fs; - fn main() { - fs::copy("src/bar.rs.in", "src/bar.rs").unwrap(); - } - "#, - ) - .file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 0 }") - .build(); - p.root().join("bar").move_into_the_past(); - - p.cargo("build") - .with_stderr( - "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in \ - [..]\n", - ) - .run(); - - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("0\n").run(); - - // Touching bar.rs.in should cause the `build` command to run again. - p.change_file("bar/src/bar.rs.in", "pub fn gimme() -> i32 { 1 }"); - - p.cargo("build") - .with_stderr( - "[COMPILING] bar v0.5.0 ([CWD]/bar)\n\ - [COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) in \ - [..]\n", - ) - .run(); - - p.process(&p.bin("foo")).with_stdout("1\n").run(); -} - -#[cargo_test] -fn dev_deps_no_rebuild_lib() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dev-dependencies.bar] - path = "bar" - - [lib] - name = "foo" - doctest = false - "#, - ) - .file( - "src/lib.rs", - r#" - #[cfg(test)] #[allow(unused_extern_crates)] extern crate bar; - #[cfg(not(test))] pub fn foo() { env!("FOO"); } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - p.cargo("build") - .env("FOO", "bar") - .with_stderr( - "[COMPILING] foo v0.5.0 ([CWD])\n\ - [FINISHED] dev [unoptimized + debuginfo] target(s) \ - in [..]\n", - ) - .run(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] [..] v0.5.0 ([CWD][..]) -[COMPILING] [..] v0.5.0 ([CWD][..]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("running 0 tests") - .run(); -} - -#[cargo_test] -fn custom_target_no_rebuild() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - [dependencies] - a = { path = "a" } - [workspace] - members = ["a", "b"] - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.5.0")) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies] - a = { path = "../a" } - "#, - ) - .file("b/src/lib.rs", "") - .build(); - p.cargo("build") - .with_stderr( - "\ -[COMPILING] a v0.5.0 ([..]) -[COMPILING] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - t!(fs::rename( - p.root().join("target"), - p.root().join("target_moved") - )); - p.cargo("build --manifest-path=b/Cargo.toml") - .env("CARGO_TARGET_DIR", "target_moved") - .with_stderr( - "\ -[COMPILING] b v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn override_and_depend() { - let p = project() - .no_manifest() - .file( - "a/a1/Cargo.toml", - r#" - [project] - name = "a1" - version = "0.5.0" - authors = [] - [dependencies] - a2 = { path = "../a2" } - "#, - ) - .file("a/a1/src/lib.rs", "") - .file("a/a2/Cargo.toml", &basic_manifest("a2", "0.5.0")) - .file("a/a2/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [project] - name = "b" - version = "0.5.0" - authors = [] - [dependencies] - a1 = { path = "../a/a1" } - a2 = { path = "../a/a2" } - "#, - ) - .file("b/src/lib.rs", "") - .file("b/.cargo/config", r#"paths = ["../a"]"#) - .build(); - p.cargo("build") - .cwd("b") - .with_stderr( - "\ -[COMPILING] a2 v0.5.0 ([..]) -[COMPILING] a1 v0.5.0 ([..]) -[COMPILING] b v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn missing_path_dependency() { - let p = project() - .file("Cargo.toml", &basic_manifest("a", "0.5.0")) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#"paths = ["../whoa-this-does-not-exist"]"#, - ) - .build(); - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to update path override `[..]../whoa-this-does-not-exist` \ -(defined in `[..]`) - -Caused by: - failed to read directory `[..]` - -Caused by: - [..] (os error [..]) -", - ) - .run(); -} - -#[cargo_test] -fn invalid_path_dep_in_workspace_with_lockfile() { - Package::new("bar", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "top" - version = "0.5.0" - authors = [] - - [workspace] - - [dependencies] - foo = { path = "foo" } - "#, - ) - .file("src/lib.rs", "") - .file( - "foo/Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - - // Generate a lock file - p.cargo("build").run(); - - // Change the dependency on `bar` to an invalid path - p.change_file( - "foo/Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - bar = { path = "" } - "#, - ); - - // Make sure we get a nice error. In the past this actually stack - // overflowed! - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: no matching package found -searched package name: `bar` -perhaps you meant: foo -location searched: [..] -required by package `foo v0.5.0 ([..])` -", - ) - .run(); -} - -#[cargo_test] -fn workspace_produces_rlib() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "top" - version = "0.5.0" - authors = [] - - [workspace] - - [dependencies] - foo = { path = "foo" } - "#, - ) - .file("src/lib.rs", "") - .file("foo/Cargo.toml", &basic_manifest("foo", "0.5.0")) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - assert!(p.root().join("target/debug/libtop.rlib").is_file()); - assert!(!p.root().join("target/debug/libfoo.rlib").is_file()); -} - -#[cargo_test] -fn deep_path_error() { - // Test for an error loading a path deep in the dependency graph. 
- let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [dependencies] - a = {path="a"} - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - [dependencies] - b = {path="../b"} - "#, - ) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - [dependencies] - c = {path="../c"} - "#, - ) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to get `c` as a dependency of package `b v0.1.0 [..]` - ... which satisfies path dependency `b` of package `a v0.1.0 [..]` - ... which satisfies path dependency `a` of package `foo v0.1.0 [..]` - -Caused by: - failed to load source for dependency `c` - -Caused by: - Unable to update [..]/foo/c - -Caused by: - failed to read `[..]/foo/c/Cargo.toml` - -Caused by: - [..] -", - ) - .run(); -} - -#[cargo_test] -fn catch_tricky_cycle() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "message" - version = "0.1.0" - - [dev-dependencies] - test = { path = "test" } - "#, - ) - .file("src/lib.rs", "") - .file( - "tangle/Cargo.toml", - r#" - [package] - name = "tangle" - version = "0.1.0" - - [dependencies] - message = { path = ".." } - snapshot = { path = "../snapshot" } - "#, - ) - .file("tangle/src/lib.rs", "") - .file( - "snapshot/Cargo.toml", - r#" - [package] - name = "snapshot" - version = "0.1.0" - - [dependencies] - ledger = { path = "../ledger" } - "#, - ) - .file("snapshot/src/lib.rs", "") - .file( - "ledger/Cargo.toml", - r#" - [package] - name = "ledger" - version = "0.1.0" - - [dependencies] - tangle = { path = "../tangle" } - "#, - ) - .file("ledger/src/lib.rs", "") - .file( - "test/Cargo.toml", - r#" - [package] - name = "test" - version = "0.1.0" - - [dependencies] - snapshot = { path = "../snapshot" } - "#, - ) - .file("test/src/lib.rs", "") - .build(); - - p.cargo("test") - .with_stderr_contains("[..]cyclic package dependency[..]") - .with_status(101) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/paths.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/paths.rs deleted file mode 100644 index c5b775865..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/paths.rs +++ /dev/null @@ -1,226 +0,0 @@ -//! Tests for `paths` overrides. - -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_manifest, project}; - -#[cargo_test] -fn broken_path_override_warns() { - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.2.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a1" } - "#, - ) - .file("src/lib.rs", "") - .file( - "a1/Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - "#, - ) - .file("a1/src/lib.rs", "") - .file( - "a2/Cargo.toml", - r#" - [package] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.2" - "#, - ) - .file("a2/src/lib.rs", "") - .file(".cargo/config", r#"paths = ["a2"]"#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] [..] 
-warning: path override for crate `a` has altered the original list of -dependencies; the dependency on `bar` was either added or -modified to not match the previously resolved version - -This is currently allowed but is known to produce buggy behavior with spurious -recompiles and changes to the crate graph. Path overrides unfortunately were -never intended to support this feature, so for now this message is just a -warning. In the future, however, this message will become a hard error. - -To change the dependency graph via an override it's recommended to use the -`[patch]` feature of Cargo instead of the path override feature. This is -documented online at the url below for more information. - -https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html - -[DOWNLOADING] crates ... -[DOWNLOADED] [..] -[COMPILING] [..] -[COMPILING] [..] -[COMPILING] [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn override_to_path_dep() { - Package::new("bar", "0.1.0").dep("baz", "0.1").publish(); - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - baz = { path = "baz" } - "#, - ) - .file("bar/src/lib.rs", "") - .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) - .file("bar/baz/src/lib.rs", "") - .file(".cargo/config", r#"paths = ["bar"]"#) - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn paths_ok_with_optional() { - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - baz = { version = "0.1", optional = true } - "#, - ) - .file("bar/src/lib.rs", "") - .file( - "bar2/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - baz = { version = "0.1", optional = true } - "#, - ) - .file("bar2/src/lib.rs", "") - .file(".cargo/config", r#"paths = ["bar2"]"#) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 ([..]bar2) -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn paths_add_optional_bad() { - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .file( - "bar2/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - baz = { version = "0.1", optional = true } - "#, - ) - .file("bar2/src/lib.rs", "") - .file(".cargo/config", r#"paths = ["bar2"]"#) - .build(); - - p.cargo("build") - .with_stderr_contains( - "\ -warning: path override for crate `bar` has altered the original list of -dependencies; the dependency on `baz` was either added or\ -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/pkgid.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/pkgid.rs deleted file mode 100644 index 5cd4cd41b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/pkgid.rs +++ /dev/null @@ -1,128 +0,0 @@ -//! Tests for the `cargo pkgid` command. - -use cargo_test_support::project; -use cargo_test_support::registry::Package; - -#[cargo_test] -fn simple() { - Package::new("bar", "0.1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("generate-lockfile").run(); - - p.cargo("pkgid foo") - .with_stdout(format!("file://[..]{}#0.1.0", p.root().to_str().unwrap())) - .run(); - - p.cargo("pkgid bar") - .with_stdout("https://github.com/rust-lang/crates.io-index#bar:0.1.0") - .run(); -} - -#[cargo_test] -fn suggestion_bad_pkgid() { - Package::new("crates-io", "0.1.0").publish(); - Package::new("two-ver", "0.1.0").publish(); - Package::new("two-ver", "0.2.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - crates-io = "0.1.0" - two-ver = "0.1.0" - two-ver2 = { package = "two-ver", version = "0.2.0" } - "#, - ) - .file("src/lib.rs", "") - .file("cratesio", "") - .build(); - - p.cargo("generate-lockfile").run(); - - // Bad URL. - p.cargo("pkgid https://example.com/crates-io") - .with_status(101) - .with_stderr( - "\ -error: package ID specification `https://example.com/crates-io` did not match any packages -Did you mean one of these? - - crates-io:0.1.0 -", - ) - .run(); - - // Bad name. - p.cargo("pkgid crates_io") - .with_status(101) - .with_stderr( - "\ -error: package ID specification `crates_io` did not match any packages - -Did you mean `crates-io`? -", - ) - .run(); - - // Bad version. - p.cargo("pkgid two-ver:0.3.0") - .with_status(101) - .with_stderr( - "\ -error: package ID specification `two-ver:0.3.0` did not match any packages -Did you mean one of these? - - two-ver:0.1.0 - two-ver:0.2.0 -", - ) - .run(); - - // Bad file URL. - p.cargo("pkgid ./Cargo.toml") - .with_status(101) - .with_stderr( - "\ -error: invalid package ID specification: `./Cargo.toml` - -Caused by: - package ID specification `./Cargo.toml` looks like a file path, maybe try file://[..]/Cargo.toml -", - ) - .run(); - - // Bad file URL with simliar name. 
- p.cargo("pkgid './cratesio'") - .with_status(101) - .with_stderr( - "\ -error: invalid package ID specification: `./cratesio` - -Did you mean `crates-io`? - -Caused by: - package ID specification `./cratesio` looks like a file path, maybe try file://[..]/cratesio -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/plugins.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/plugins.rs deleted file mode 100644 index e1d9c1da6..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/plugins.rs +++ /dev/null @@ -1,439 +0,0 @@ -//! Tests for rustc plugins. - -use cargo_test_support::{basic_manifest, project}; -use cargo_test_support::{is_nightly, rustc_host}; - -#[cargo_test] -fn plugin_to_the_max() { - if !is_nightly() { - // plugins are unstable - return; - } - - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo_lib" - - [dependencies.bar] - path = "../bar" - "#, - ) - .file( - "src/main.rs", - r#" - #![feature(plugin)] - #![plugin(bar)] - extern crate foo_lib; - - fn main() { foo_lib::foo(); } - "#, - ) - .file( - "src/foo_lib.rs", - r#" - #![feature(plugin)] - #![plugin(bar)] - - pub fn foo() {} - "#, - ) - .build(); - let _bar = project() - .at("bar") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - plugin = true - - [dependencies.baz] - path = "../baz" - "#, - ) - .file( - "src/lib.rs", - r#" - #![feature(rustc_private)] - - extern crate baz; - extern crate rustc_driver; - - use rustc_driver::plugin::Registry; - - #[no_mangle] - pub fn __rustc_plugin_registrar(_reg: &mut Registry) { - println!("{}", baz::baz()); - } - "#, - ) - .build(); - let _baz = project() - .at("baz") - .file( - "Cargo.toml", - r#" - [package] - name = "baz" - version = "0.0.1" - authors = [] - - [lib] - name = "baz" - crate_type = ["dylib"] - "#, - ) - .file("src/lib.rs", "pub fn baz() -> i32 { 1 }") - .build(); - - foo.cargo("build").run(); - foo.cargo("doc").run(); -} - -#[cargo_test] -fn plugin_with_dynamic_native_dependency() { - if !is_nightly() { - // plugins are unstable - return; - } - - let build = project() - .at("builder") - .file( - "Cargo.toml", - r#" - [package] - name = "builder" - version = "0.0.1" - authors = [] - - [lib] - name = "builder" - crate-type = ["dylib"] - "#, - ) - .file("src/lib.rs", "#[no_mangle] pub extern fn foo() {}") - .build(); - - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file( - "src/main.rs", - r#" - #![feature(plugin)] - #![plugin(bar)] - - fn main() {} - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - build = 'build.rs' - - [lib] - name = "bar" - plugin = true - "#, - ) - .file( - "bar/build.rs", - r#" - use std::env; - use std::fs; - use std::path::PathBuf; - - fn main() { - let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); - let root = PathBuf::from(env::var("BUILDER_ROOT").unwrap()); - let file = format!("{}builder{}", - env::consts::DLL_PREFIX, - env::consts::DLL_SUFFIX); - let src = root.join(&file); - let dst = out_dir.join(&file); - fs::copy(src, dst).unwrap(); - if cfg!(target_env = "msvc") { - fs::copy(root.join("builder.dll.lib"), - out_dir.join("builder.dll.lib")).unwrap(); - } - println!("cargo:rustc-flags=-L {}", out_dir.display()); - } - "#, - ) - 
.file( - "bar/src/lib.rs", - r#" - #![feature(rustc_private)] - - extern crate rustc_driver; - use rustc_driver::plugin::Registry; - - #[cfg_attr(not(target_env = "msvc"), link(name = "builder"))] - #[cfg_attr(target_env = "msvc", link(name = "builder.dll"))] - extern { fn foo(); } - - #[no_mangle] - pub fn __rustc_plugin_registrar(_reg: &mut Registry) { - unsafe { foo() } - } - "#, - ) - .build(); - - build.cargo("build").run(); - - let root = build.root().join("target").join("debug"); - foo.cargo("build -v").env("BUILDER_ROOT", root).run(); -} - -#[cargo_test] -fn plugin_integration() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - - [lib] - name = "foo" - plugin = true - doctest = false - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .file("tests/it_works.rs", "") - .build(); - - p.cargo("test -v").run(); -} - -#[cargo_test] -fn doctest_a_plugin() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "#[macro_use] extern crate bar;") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - plugin = true - "#, - ) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("test -v").run(); -} - -// See #1515 -#[cargo_test] -fn native_plugin_dependency_with_custom_linker() { - let target = rustc_host(); - - let _foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - plugin = true - "#, - ) - .file("src/lib.rs", "") - .build(); - - let bar = project() - .at("bar") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies.foo] - path = "../foo" - "#, - ) - .file("src/lib.rs", "") - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - linker = "nonexistent-linker" - "#, - target - ), - ) - .build(); - - bar.cargo("build --verbose") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] -C linker=nonexistent-linker [..]` -[ERROR] [..]linker[..] 
-", - ) - .run(); -} - -#[cargo_test] -fn panic_abort_plugins() { - if !is_nightly() { - // requires rustc_private - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.dev] - panic = 'abort' - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - plugin = true - "#, - ) - .file( - "bar/src/lib.rs", - r#" - #![feature(rustc_private)] - extern crate rustc_ast; - "#, - ) - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn shared_panic_abort_plugins() { - if !is_nightly() { - // requires rustc_private - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.dev] - panic = 'abort' - - [dependencies] - bar = { path = "bar" } - baz = { path = "baz" } - "#, - ) - .file("src/lib.rs", "extern crate baz;") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - plugin = true - - [dependencies] - baz = { path = "../baz" } - "#, - ) - .file( - "bar/src/lib.rs", - r#" - #![feature(rustc_private)] - extern crate rustc_ast; - extern crate baz; - "#, - ) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.0.1")) - .file("baz/src/lib.rs", "") - .build(); - - p.cargo("build -v").run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/proc_macro.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/proc_macro.rs deleted file mode 100644 index 12ff56284..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/proc_macro.rs +++ /dev/null @@ -1,541 +0,0 @@ -//! Tests for proc-macros. 
- -use cargo_test_support::is_nightly; -use cargo_test_support::project; - -#[cargo_test] -fn probe_cfg_before_crate_type_discovery() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [target.'cfg(not(stage300))'.dependencies.noop] - path = "../noop" - "#, - ) - .file( - "src/main.rs", - r#" - #[macro_use] - extern crate noop; - - #[derive(Noop)] - struct X; - - fn main() {} - "#, - ) - .build(); - let _noop = project() - .at("noop") - .file( - "Cargo.toml", - r#" - [package] - name = "noop" - version = "0.0.1" - authors = [] - - [lib] - proc-macro = true - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro_derive(Noop)] - pub fn noop(_input: TokenStream) -> TokenStream { - "".parse().unwrap() - } - "#, - ) - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn noop() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.noop] - path = "../noop" - "#, - ) - .file( - "src/main.rs", - r#" - #[macro_use] - extern crate noop; - - #[derive(Noop)] - struct X; - - fn main() {} - "#, - ) - .build(); - let _noop = project() - .at("noop") - .file( - "Cargo.toml", - r#" - [package] - name = "noop" - version = "0.0.1" - authors = [] - - [lib] - proc-macro = true - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro_derive(Noop)] - pub fn noop(_input: TokenStream) -> TokenStream { - "".parse().unwrap() - } - "#, - ) - .build(); - - p.cargo("build").run(); - p.cargo("build").run(); -} - -#[cargo_test] -fn impl_and_derive() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.transmogrify] - path = "../transmogrify" - "#, - ) - .file( - "src/main.rs", - r#" - #[macro_use] - extern crate transmogrify; - - trait ImplByTransmogrify { - fn impl_by_transmogrify(&self) -> bool; - } - - #[derive(Transmogrify, Debug)] - struct X { success: bool } - - fn main() { - let x = X::new(); - assert!(x.impl_by_transmogrify()); - println!("{:?}", x); - } - "#, - ) - .build(); - let _transmogrify = project() - .at("transmogrify") - .file( - "Cargo.toml", - r#" - [package] - name = "transmogrify" - version = "0.0.1" - authors = [] - - [lib] - proc-macro = true - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro_derive(Transmogrify)] - #[doc(hidden)] - pub fn transmogrify(input: TokenStream) -> TokenStream { - " - impl X { - fn new() -> Self { - X { success: true } - } - } - - impl ImplByTransmogrify for X { - fn impl_by_transmogrify(&self) -> bool { - true - } - } - ".parse().unwrap() - } - "#, - ) - .build(); - - p.cargo("build").run(); - p.cargo("run").with_stdout("X { success: true }").run(); -} - -#[cargo_test] -fn plugin_and_proc_macro() { - if !is_nightly() { - // plugins are unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - plugin = true - proc-macro = true - "#, - ) - .file( - "src/lib.rs", - r#" - #![feature(rustc_private)] - #![feature(proc_macro, proc_macro_lib)] - - extern crate rustc_driver; - use rustc_driver::plugin::Registry; - - extern crate proc_macro; - use proc_macro::TokenStream; - - #[no_mangle] - pub fn __rustc_plugin_registrar(reg: &mut Registry) {} - - 
#[proc_macro_derive(Questionable)] - pub fn questionable(input: TokenStream) -> TokenStream { - input - } - "#, - ) - .build(); - - let msg = " `lib.plugin` and `lib.proc-macro` cannot both be `true`"; - p.cargo("build") - .with_status(101) - .with_stderr_contains(msg) - .run(); -} - -#[cargo_test] -fn proc_macro_doctest() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - [lib] - proc-macro = true - "#, - ) - .file( - "src/lib.rs", - r#" - #![crate_type = "proc-macro"] - - extern crate proc_macro; - - use proc_macro::TokenStream; - - /// ``` - /// assert!(true); - /// ``` - #[proc_macro_derive(Bar)] - pub fn derive(_input: TokenStream) -> TokenStream { - "".parse().unwrap() - } - - #[test] - fn a() { - assert!(true); - } - "#, - ) - .build(); - - foo.cargo("test") - .with_stdout_contains("test a ... ok") - .with_stdout_contains_n("test [..] ... ok", 2) - .run(); -} - -#[cargo_test] -fn proc_macro_crate_type() { - // Verify that `crate-type = ["proc-macro"]` is the same as `proc-macro = true` - // and that everything, including rustdoc, works correctly. - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [dependencies] - pm = { path = "pm" } - "#, - ) - .file( - "src/lib.rs", - r#" - //! ``` - //! use foo::THING; - //! assert_eq!(THING, 123); - //! ``` - #[macro_use] - extern crate pm; - #[derive(MkItem)] - pub struct S; - #[cfg(test)] - mod tests { - use super::THING; - #[test] - fn it_works() { - assert_eq!(THING, 123); - } - } - "#, - ) - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - [lib] - crate-type = ["proc-macro"] - "#, - ) - .file( - "pm/src/lib.rs", - r#" - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro_derive(MkItem)] - pub fn mk_item(_input: TokenStream) -> TokenStream { - "pub const THING: i32 = 123;".parse().unwrap() - } - "#, - ) - .build(); - - foo.cargo("test") - .with_stdout_contains("test tests::it_works ... ok") - .with_stdout_contains_n("test [..] ... 
ok", 2) - .run(); -} - -#[cargo_test] -fn proc_macro_crate_type_warning() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [lib] - crate-type = ["proc-macro"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - foo.cargo("build") - .with_stderr_contains( - "[WARNING] library `foo` should only specify `proc-macro = true` instead of setting `crate-type`") - .run(); -} - -#[cargo_test] -fn proc_macro_crate_type_warning_plugin() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [lib] - crate-type = ["proc-macro"] - plugin = true - "#, - ) - .file("src/lib.rs", "") - .build(); - - foo.cargo("build") - .with_stderr_contains( - "[WARNING] proc-macro library `foo` should not specify `plugin = true`") - .with_stderr_contains( - "[WARNING] library `foo` should only specify `proc-macro = true` instead of setting `crate-type`") - .run(); -} - -#[cargo_test] -fn proc_macro_crate_type_multiple() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [lib] - crate-type = ["proc-macro", "rlib"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - foo.cargo("build") - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` - -Caused by: - cannot mix `proc-macro` crate type with others -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn proc_macro_extern_prelude() { - // Check that proc_macro is in the extern prelude. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - [lib] - proc-macro = true - "#, - ) - .file( - "src/lib.rs", - r#" - use proc_macro::TokenStream; - #[proc_macro] - pub fn foo(input: TokenStream) -> TokenStream { - "".parse().unwrap() - } - "#, - ) - .build(); - p.cargo("test").run(); - p.cargo("doc").run(); -} - -#[cargo_test] -fn proc_macro_built_once() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ['a', 'b'] - resolver = "2" - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [build-dependencies] - the-macro = { path = '../the-macro' } - "#, - ) - .file("a/build.rs", "fn main() {}") - .file("a/src/main.rs", "fn main() {}") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - - [dependencies] - the-macro = { path = '../the-macro', features = ['a'] } - "#, - ) - .file("b/src/main.rs", "fn main() {}") - .file( - "the-macro/Cargo.toml", - r#" - [package] - name = "the-macro" - version = "0.1.0" - - [lib] - proc_macro = true - - [features] - a = [] - "#, - ) - .file("the-macro/src/lib.rs", "") - .build(); - p.cargo("build --verbose") - .with_stderr_unordered( - "\ -[COMPILING] the-macro [..] -[RUNNING] `rustc --crate-name the_macro [..]` -[COMPILING] b [..] -[RUNNING] `rustc --crate-name b [..]` -[COMPILING] a [..] -[RUNNING] `rustc --crate-name build_script_build [..]` -[RUNNING] `[..]build[..]script[..]build[..]` -[RUNNING] `rustc --crate-name a [..]` -[FINISHED] [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_config.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_config.rs deleted file mode 100644 index 1678a5791..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_config.rs +++ /dev/null @@ -1,458 +0,0 @@ -//! Tests for profiles defined in config files. 
- -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_lib_manifest, paths, project}; - -#[cargo_test] -fn profile_config_validate_warnings() { - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [profile.test] - opt-level = 3 - - [profile.asdf] - opt-level = 3 - - [profile.dev] - bad-key = true - - [profile.dev.build-override] - bad-key-bo = true - - [profile.dev.package.bar] - bad-key-bar = true - "#, - ) - .build(); - - p.cargo("build") - .with_stderr_unordered( - "\ -[WARNING] unused config key `profile.dev.bad-key` in `[..].cargo/config` -[WARNING] unused config key `profile.dev.package.bar.bad-key-bar` in `[..].cargo/config` -[WARNING] unused config key `profile.dev.build-override.bad-key-bo` in `[..].cargo/config` -[COMPILING] foo [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn profile_config_error_paths() { - // Errors in config show where the error is located. - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [profile.dev] - opt-level = 3 - "#, - ) - .file( - paths::home().join(".cargo/config"), - r#" - [profile.dev] - rpath = "foo" - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] error in [..]/foo/.cargo/config: could not load config key `profile.dev` - -Caused by: - error in [..]/home/.cargo/config: `profile.dev.rpath` expected true/false, but found a string -", - ) - .run(); -} - -#[cargo_test] -fn profile_config_validate_errors() { - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [profile.dev.package.foo] - panic = "abort" - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] config profile `dev` is not valid (defined in `[..]/foo/.cargo/config`) - -Caused by: - `panic` may not be specified in a `package` profile -", - ) - .run(); -} - -#[cargo_test] -fn profile_config_syntax_errors() { - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [profile.dev] - codegen-units = "foo" - "#, - ) - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] error in [..]/.cargo/config: could not load config key `profile.dev` - -Caused by: - error in [..]/foo/.cargo/config: `profile.dev.codegen-units` expected an integer, but found a string -", - ) - .run(); -} - -#[cargo_test] -fn profile_config_override_spec_multiple() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file( - ".cargo/config", - r#" - [profile.dev.package.bar] - opt-level = 3 - - [profile.dev.package."bar:0.5.0"] - opt-level = 3 - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .build(); - - // Unfortunately this doesn't tell you which file, hopefully it's not too - // much of a problem. - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[ERROR] multiple package overrides in profile `dev` match package `bar v0.5.0 ([..])` -found package specs: bar, bar:0.5.0", - ) - .run(); -} - -#[cargo_test] -fn profile_config_all_options() { - // Ensure all profile options are supported. 
- let p = project() - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [profile.release] - opt-level = 1 - debug = true - debug-assertions = true - overflow-checks = false - rpath = true - lto = true - codegen-units = 2 - panic = "abort" - incremental = true - "#, - ) - .build(); - - p.cargo("build --release -v") - .env_remove("CARGO_INCREMENTAL") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name foo [..] \ - -C opt-level=1 \ - -C panic=abort \ - -C lto[..]\ - -C codegen-units=2 \ - -C debuginfo=2 \ - -C debug-assertions=on \ - -C overflow-checks=off [..]\ - -C rpath [..]\ - -C incremental=[..] -[FINISHED] release [optimized + debuginfo] [..] -", - ) - .run(); -} - -#[cargo_test] -fn profile_config_override_precedence() { - // Config values take precedence over manifest values. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = {path = "bar"} - - [profile.dev] - codegen-units = 2 - - [profile.dev.package.bar] - opt-level = 3 - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .file( - ".cargo/config", - r#" - [profile.dev.package.bar] - opt-level = 2 - "#, - ) - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] bar [..] -[RUNNING] `rustc --crate-name bar [..] -C opt-level=2[..]-C codegen-units=2 [..] -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name foo [..]-C codegen-units=2 [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - ) - .run(); -} - -#[cargo_test] -fn profile_config_no_warn_unknown_override() { - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [profile.dev.package.bar] - codegen-units = 4 - "#, - ) - .build(); - - p.cargo("build") - .with_stderr_does_not_contain("[..]warning[..]") - .run(); -} - -#[cargo_test] -fn profile_config_mixed_types() { - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [profile.dev] - opt-level = 3 - "#, - ) - .file( - paths::home().join(".cargo/config"), - r#" - [profile.dev] - opt-level = 's' - "#, - ) - .build(); - - p.cargo("build -v") - .with_stderr_contains("[..]-C opt-level=3 [..]") - .run(); -} - -#[cargo_test] -fn named_config_profile() { - // Exercises config named profies. 
- // foo -> middle -> bar -> dev - // middle exists in Cargo.toml, the others in .cargo/config - use super::config::ConfigBuilder; - use cargo::core::compiler::{CompileKind, CompileMode}; - use cargo::core::profiles::{Profiles, UnitFor}; - use cargo::core::{PackageId, Workspace}; - use cargo::util::interning::InternedString; - use std::fs; - paths::root().join(".cargo").mkdir_p(); - fs::write( - paths::root().join(".cargo/config"), - r#" - [profile.foo] - inherits = "middle" - codegen-units = 2 - [profile.foo.build-override] - codegen-units = 6 - [profile.foo.package.dep] - codegen-units = 7 - - [profile.middle] - inherits = "bar" - codegen-units = 3 - - [profile.bar] - inherits = "dev" - codegen-units = 4 - debug = 1 - "#, - ) - .unwrap(); - fs::write( - paths::root().join("Cargo.toml"), - r#" - [workspace] - - [profile.middle] - inherits = "bar" - codegen-units = 1 - opt-level = 1 - [profile.middle.package.dep] - overflow-checks = false - - [profile.foo.build-override] - codegen-units = 5 - debug-assertions = false - [profile.foo.package.dep] - codegen-units = 8 - "#, - ) - .unwrap(); - let config = ConfigBuilder::new().build(); - let profile_name = InternedString::new("foo"); - let ws = Workspace::new(&paths::root().join("Cargo.toml"), &config).unwrap(); - let profiles = Profiles::new(&ws, profile_name).unwrap(); - - let crates_io = cargo::core::source::SourceId::crates_io(&config).unwrap(); - let a_pkg = PackageId::new("a", "0.1.0", crates_io).unwrap(); - let dep_pkg = PackageId::new("dep", "0.1.0", crates_io).unwrap(); - - // normal package - let mode = CompileMode::Build; - let kind = CompileKind::Host; - let p = profiles.get_profile(a_pkg, true, true, UnitFor::new_normal(), mode, kind); - assert_eq!(p.name, "foo"); - assert_eq!(p.codegen_units, Some(2)); // "foo" from config - assert_eq!(p.opt_level, "1"); // "middle" from manifest - assert_eq!(p.debuginfo, Some(1)); // "bar" from config - assert_eq!(p.debug_assertions, true); // "dev" built-in (ignore build-override) - assert_eq!(p.overflow_checks, true); // "dev" built-in (ignore package override) - - // build-override - let bo = profiles.get_profile(a_pkg, true, true, UnitFor::new_host(false), mode, kind); - assert_eq!(bo.name, "foo"); - assert_eq!(bo.codegen_units, Some(6)); // "foo" build override from config - assert_eq!(bo.opt_level, "0"); // default to zero - assert_eq!(bo.debuginfo, Some(1)); // SAME as normal - assert_eq!(bo.debug_assertions, false); // "foo" build override from manifest - assert_eq!(bo.overflow_checks, true); // SAME as normal - - // package overrides - let po = profiles.get_profile(dep_pkg, false, true, UnitFor::new_normal(), mode, kind); - assert_eq!(po.name, "foo"); - assert_eq!(po.codegen_units, Some(7)); // "foo" package override from config - assert_eq!(po.opt_level, "1"); // SAME as normal - assert_eq!(po.debuginfo, Some(1)); // SAME as normal - assert_eq!(po.debug_assertions, true); // SAME as normal - assert_eq!(po.overflow_checks, false); // "middle" package override from manifest -} - -#[cargo_test] -fn named_env_profile() { - // Environment variables used to define a named profile. 
- let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v --profile=other") - .env("CARGO_PROFILE_OTHER_CODEGEN_UNITS", "1") - .env("CARGO_PROFILE_OTHER_INHERITS", "dev") - .with_stderr_contains("[..]-C codegen-units=1 [..]") - .run(); -} - -#[cargo_test] -fn test_with_dev_profile() { - // The `test` profile inherits from `dev` for both local crates and - // dependencies. - Package::new("somedep", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - somedep = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("test --lib --no-run -v") - .env("CARGO_PROFILE_DEV_DEBUG", "0") - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] [..] -[DOWNLOADED] [..] -[COMPILING] somedep v1.0.0 -[RUNNING] `rustc --crate-name somedep [..]-C debuginfo=0[..] -[COMPILING] foo v0.1.0 [..] -[RUNNING] `rustc --crate-name foo [..]-C debuginfo=0[..] -[FINISHED] [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_custom.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_custom.rs deleted file mode 100644 index 889665baf..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_custom.rs +++ /dev/null @@ -1,728 +0,0 @@ -//! Tests for named profiles. - -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::{basic_lib_manifest, project}; - -#[cargo_test] -fn inherits_on_release() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.release] - inherits = "dev" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] `inherits` must not be specified in root profile `release` -", - ) - .run(); -} - -#[cargo_test] -fn missing_inherits() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.release-lto] - codegen-units = 7 - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] profile `release-lto` is missing an `inherits` directive \ - (`inherits` is required for all profiles except `dev` or `release`) -", - ) - .run(); -} - -#[cargo_test] -fn invalid_profile_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.'.release-lto'] - inherits = "release" - codegen-units = 7 - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at [..] - -Caused by: - invalid character `.` in profile name `.release-lto` - Allowed characters are letters, numbers, underscore, and hyphen. -", - ) - .run(); -} - -#[cargo_test] -#[ignore] // dir-name is currently disabled -fn invalid_dir_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.'release-lto'] - inherits = "release" - dir-name = ".subdir" - codegen-units = 7 - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at [..] 
- -Caused by: - Invalid character `.` in dir-name: `.subdir`", - ) - .run(); -} - -#[cargo_test] -fn dir_name_disabled() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.release-lto] - inherits = "release" - dir-name = "lto" - lto = true - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` - -Caused by: - dir-name=\"lto\" in profile `release-lto` is not currently allowed, \ - directory names are tied to the profile name for custom profiles -", - ) - .run(); -} - -#[cargo_test] -fn invalid_inherits() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.'release-lto'] - inherits = ".release" - codegen-units = 7 - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "error: profile `release-lto` inherits from `.release`, \ - but that profile is not defined", - ) - .run(); -} - -#[cargo_test] -fn non_existent_inherits() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.release-lto] - codegen-units = 7 - inherits = "non-existent" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] profile `release-lto` inherits from `non-existent`, but that profile is not defined -", - ) - .run(); -} - -#[cargo_test] -fn self_inherits() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.release-lto] - codegen-units = 7 - inherits = "release-lto" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] profile inheritance loop detected with profile `release-lto` inheriting `release-lto` -", - ) - .run(); -} - -#[cargo_test] -fn inherits_loop() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.release-lto] - codegen-units = 7 - inherits = "release-lto2" - - [profile.release-lto2] - codegen-units = 7 - inherits = "release-lto" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] profile inheritance loop detected with profile `release-lto2` inheriting `release-lto` -", - ) - .run(); -} - -#[cargo_test] -fn overrides_with_custom() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - xxx = {path = "xxx"} - yyy = {path = "yyy"} - - [profile.dev] - codegen-units = 7 - - [profile.dev.package.xxx] - codegen-units = 5 - [profile.dev.package.yyy] - codegen-units = 3 - - [profile.other] - inherits = "dev" - codegen-units = 2 - - [profile.other.package.yyy] - codegen-units = 6 - "#, - ) - .file("src/lib.rs", "") - .file("xxx/Cargo.toml", &basic_lib_manifest("xxx")) - .file("xxx/src/lib.rs", "") - .file("yyy/Cargo.toml", &basic_lib_manifest("yyy")) - .file("yyy/src/lib.rs", "") - .build(); - - // profile overrides are inherited between profiles using inherits and have a - // higher priority than profile options provided by custom profiles - p.cargo("build -v") - .with_stderr_unordered( - "\ -[COMPILING] xxx [..] -[COMPILING] yyy [..] -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name xxx [..] 
-C codegen-units=5 [..]` -[RUNNING] `rustc --crate-name yyy [..] -C codegen-units=3 [..]` -[RUNNING] `rustc --crate-name foo [..] -C codegen-units=7 [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - // This also verifies that the custom profile names appears in the finished line. - p.cargo("build --profile=other -v") - .with_stderr_unordered( - "\ -[COMPILING] xxx [..] -[COMPILING] yyy [..] -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name xxx [..] -C codegen-units=5 [..]` -[RUNNING] `rustc --crate-name yyy [..] -C codegen-units=6 [..]` -[RUNNING] `rustc --crate-name foo [..] -C codegen-units=2 [..]` -[FINISHED] other [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn conflicting_usage() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --profile=dev --release") - .with_status(101) - .with_stderr( - "\ -error: conflicting usage of --profile=dev and --release -The `--release` flag is the same as `--profile=release`. -Remove one flag or the other to continue. -", - ) - .run(); - - p.cargo("install --profile=release --debug") - .with_status(101) - .with_stderr( - "\ -error: conflicting usage of --profile=release and --debug -The `--debug` flag is the same as `--profile=dev`. -Remove one flag or the other to continue. -", - ) - .run(); - - p.cargo("rustc --profile=dev --release") - .with_stderr( - "\ -warning: the `--release` flag should not be specified with the `--profile` flag -The `--release` flag will be ignored. -This was historically accepted, but will become an error in a future release. -[COMPILING] foo [..] -[FINISHED] dev [..] -", - ) - .run(); - - p.cargo("check --profile=dev --release") - .with_status(101) - .with_stderr( - "\ -error: conflicting usage of --profile=dev and --release -The `--release` flag is the same as `--profile=release`. -Remove one flag or the other to continue. -", - ) - .run(); - - p.cargo("check --profile=test --release") - .with_stderr( - "\ -warning: the `--release` flag should not be specified with the `--profile` flag -The `--release` flag will be ignored. -This was historically accepted, but will become an error in a future release. -[CHECKING] foo [..] -[FINISHED] test [..] -", - ) - .run(); - - // This is OK since the two are the same. - p.cargo("rustc --profile=release --release") - .with_stderr( - "\ -[COMPILING] foo [..] -[FINISHED] release [..] -", - ) - .run(); - - p.cargo("build --profile=release --release") - .with_stderr( - "\ -[FINISHED] release [..] -", - ) - .run(); - - p.cargo("install --path . --profile=dev --debug") - .with_stderr( - "\ -[INSTALLING] foo [..] -[FINISHED] dev [..] -[INSTALLING] [..] -[INSTALLED] [..] -[WARNING] be sure to add [..] -", - ) - .run(); - - p.cargo("install --path . --profile=release --debug") - .with_status(101) - .with_stderr( - "\ -error: conflicting usage of --profile=release and --debug -The `--debug` flag is the same as `--profile=dev`. -Remove one flag or the other to continue. 
-", - ) - .run(); -} - -#[cargo_test] -fn clean_custom_dirname() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.other] - inherits = "release" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --release") - .with_stdout("") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] release [optimized] target(s) in [..] -", - ) - .run(); - - p.cargo("clean -p foo").masquerade_as_nightly_cargo().run(); - - p.cargo("build --release") - .with_stdout("") - .with_stderr( - "\ -[FINISHED] release [optimized] target(s) in [..] -", - ) - .run(); - - p.cargo("clean -p foo --release").run(); - - p.cargo("build --release") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] release [optimized] target(s) in [..] -", - ) - .run(); - - p.cargo("build") - .with_stdout("") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build --profile=other") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] other [optimized] target(s) in [..] -", - ) - .run(); - - p.cargo("clean").arg("--release").run(); - - // Make sure that 'other' was not cleaned - assert!(p.build_dir().is_dir()); - assert!(p.build_dir().join("debug").is_dir()); - assert!(p.build_dir().join("other").is_dir()); - assert!(!p.build_dir().join("release").is_dir()); - - // This should clean 'other' - p.cargo("clean --profile=other").with_stderr("").run(); - assert!(p.build_dir().join("debug").is_dir()); - assert!(!p.build_dir().join("other").is_dir()); -} - -#[cargo_test] -fn unknown_profile() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build --profile alpha") - .with_stderr("[ERROR] profile `alpha` is not defined") - .with_status(101) - .run(); - // Clean has a separate code path, need to check it too. - p.cargo("clean --profile alpha") - .with_stderr("[ERROR] profile `alpha` is not defined") - .with_status(101) - .run(); -} - -#[cargo_test] -fn reserved_profile_names() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.doc] - opt-level = 1 - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build --profile=doc") - .with_status(101) - .with_stderr("error: profile `doc` is reserved and not allowed to be explicitly specified") - .run(); - // Not an exhaustive list, just a sample. - for name in ["build", "cargo", "check", "rustc", "CaRgO_startswith"] { - p.cargo(&format!("build --profile={}", name)) - .with_status(101) - .with_stderr(&format!( - "\ -error: profile name `{}` is reserved -Please choose a different name. -See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles. -", - name - )) - .run(); - } - for name in ["build", "check", "cargo", "rustc", "CaRgO_startswith"] { - p.change_file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.{}] - opt-level = 1 - "#, - name - ), - ); - - p.cargo("build") - .with_status(101) - .with_stderr(&format!( - "\ -error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` - -Caused by: - profile name `{}` is reserved - Please choose a different name. - See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles. 
-", - name - )) - .run(); - } - - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [profile.debug] - debug = 1 - inherits = "dev" - "#, - ); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` - -Caused by: - profile name `debug` is reserved - To configure the default development profile, use the name `dev` as in [profile.dev] - See https://doc.rust-lang.org/cargo/reference/profiles.html for more on configuring profiles. -", - ) - .run(); -} - -#[cargo_test] -fn legacy_commands_support_custom() { - // These commands have had `--profile` before custom named profiles. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.super-dev] - codegen-units = 3 - inherits = "dev" - "#, - ) - .file("src/lib.rs", "") - .build(); - - for command in ["rustc", "fix", "check"] { - let mut pb = p.cargo(command); - if command == "fix" { - pb.arg("--allow-no-vcs"); - } - pb.arg("--profile=super-dev") - .arg("-v") - .with_stderr_contains("[RUNNING] [..]codegen-units=3[..]") - .run(); - p.build_dir().rm_rf(); - } -} - -#[cargo_test] -fn legacy_rustc() { - // `cargo rustc` historically has supported dev/test/bench/check - // other profiles are covered in check::rustc_check - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.dev] - codegen-units = 3 - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("rustc --profile dev -v") - .with_stderr( - "\ -[COMPILING] foo v0.1.0 [..] -[RUNNING] `rustc --crate-name foo [..]-C codegen-units=3[..] -[FINISHED] [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_overrides.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_overrides.rs deleted file mode 100644 index 661fada49..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_overrides.rs +++ /dev/null @@ -1,515 +0,0 @@ -//! Tests for profile overrides (build-override and per-package overrides). - -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_lib_manifest, basic_manifest, project}; - -#[cargo_test] -fn profile_override_basic() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = {path = "bar"} - - [profile.dev] - opt-level = 1 - - [profile.dev.package.bar] - opt-level = 3 - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr( - "[COMPILING] bar [..] -[RUNNING] `rustc --crate-name bar [..] -C opt-level=3 [..]` -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name foo [..] 
-C opt-level=1 [..]` -[FINISHED] dev [optimized + debuginfo] target(s) in [..]", - ) - .run(); -} - -#[cargo_test] -fn profile_override_warnings() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = {path = "bar"} - - [profile.dev.package.bart] - opt-level = 3 - - [profile.dev.package.no-suggestion] - opt-level = 3 - - [profile.dev.package."bar:1.2.3"] - opt-level = 3 - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr_contains( - "\ -[WARNING] profile package spec `bar:1.2.3` in profile `dev` \ - has a version or URL that does not match any of the packages: \ - bar v0.5.0 ([..]/foo/bar) -[WARNING] profile package spec `bart` in profile `dev` did not match any packages - -Did you mean `bar`? -[WARNING] profile package spec `no-suggestion` in profile `dev` did not match any packages -[COMPILING] [..] -", - ) - .run(); -} - -#[cargo_test] -fn profile_override_bad_settings() { - let bad_values = [ - ( - "panic = \"abort\"", - "`panic` may not be specified in a `package` profile", - ), - ( - "lto = true", - "`lto` may not be specified in a `package` profile", - ), - ( - "rpath = true", - "`rpath` may not be specified in a `package` profile", - ), - ("package = {}", "package-specific profiles cannot be nested"), - ]; - for &(snippet, expected) in bad_values.iter() { - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = {{path = "bar"}} - - [profile.dev.package.bar] - {} - "#, - snippet - ), - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains(format!("Caused by:\n {}", expected)) - .run(); - } -} - -#[cargo_test] -fn profile_override_hierarchy() { - // Test that the precedence rules are correct for different types. 
- let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["m1", "m2", "m3"] - - [profile.dev] - codegen-units = 1 - - [profile.dev.package.m2] - codegen-units = 2 - - [profile.dev.package."*"] - codegen-units = 3 - - [profile.dev.build-override] - codegen-units = 4 - "#, - ) - // m1 - .file( - "m1/Cargo.toml", - r#" - [package] - name = "m1" - version = "0.0.1" - - [dependencies] - m2 = { path = "../m2" } - dep = { path = "../../dep" } - "#, - ) - .file("m1/src/lib.rs", "extern crate m2; extern crate dep;") - .file("m1/build.rs", "fn main() {}") - // m2 - .file( - "m2/Cargo.toml", - r#" - [package] - name = "m2" - version = "0.0.1" - - [dependencies] - m3 = { path = "../m3" } - - [build-dependencies] - m3 = { path = "../m3" } - dep = { path = "../../dep" } - "#, - ) - .file("m2/src/lib.rs", "extern crate m3;") - .file( - "m2/build.rs", - "extern crate m3; extern crate dep; fn main() {}", - ) - // m3 - .file("m3/Cargo.toml", &basic_lib_manifest("m3")) - .file("m3/src/lib.rs", "") - .build(); - - // dep (outside of workspace) - let _dep = project() - .at("dep") - .file("Cargo.toml", &basic_lib_manifest("dep")) - .file("src/lib.rs", "") - .build(); - - // Profiles should be: - // m3: 4 (as build.rs dependency) - // m3: 1 (as [profile.dev] as workspace member) - // dep: 3 (as [profile.dev.package."*"] as non-workspace member) - // m1 build.rs: 4 (as [profile.dev.build-override]) - // m2 build.rs: 2 (as [profile.dev.package.m2]) - // m2: 2 (as [profile.dev.package.m2]) - // m1: 1 (as [profile.dev]) - - p.cargo("build -v").with_stderr_unordered("\ -[COMPILING] m3 [..] -[COMPILING] dep [..] -[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=4 [..] -[RUNNING] `rustc --crate-name dep [..]dep/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=3 [..] -[RUNNING] `rustc --crate-name m3 m3/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=1 [..] -[RUNNING] `rustc --crate-name build_script_build m1/build.rs [..] --crate-type bin --emit=[..]link[..]-C codegen-units=4 [..] -[COMPILING] m2 [..] -[RUNNING] `rustc --crate-name build_script_build m2/build.rs [..] --crate-type bin --emit=[..]link[..]-C codegen-units=2 [..] -[RUNNING] `[..]/m1-[..]/build-script-build` -[RUNNING] `[..]/m2-[..]/build-script-build` -[RUNNING] `rustc --crate-name m2 m2/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=2 [..] -[COMPILING] m1 [..] -[RUNNING] `rustc --crate-name m1 m1/src/lib.rs [..] --crate-type lib --emit=[..]link[..]-C codegen-units=1 [..] -[FINISHED] dev [unoptimized + debuginfo] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn profile_override_spec_multiple() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = { path = "bar" } - - [profile.dev.package.bar] - opt-level = 3 - - [profile.dev.package."bar:0.5.0"] - opt-level = 3 - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr_contains( - "\ -[ERROR] multiple package overrides in profile `dev` match package `bar v0.5.0 ([..])` -found package specs: bar, bar:0.5.0", - ) - .run(); -} - -#[cargo_test] -fn profile_override_spec() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["m1", "m2"] - - [profile.dev.package."dep:1.0.0"] - codegen-units = 1 - - [profile.dev.package."dep:2.0.0"] - codegen-units = 2 - "#, - ) - // m1 - .file( - "m1/Cargo.toml", - r#" - [package] - name = "m1" - version = "0.0.1" - - [dependencies] - dep = { path = "../../dep1" } - "#, - ) - .file("m1/src/lib.rs", "extern crate dep;") - // m2 - .file( - "m2/Cargo.toml", - r#" - [package] - name = "m2" - version = "0.0.1" - - [dependencies] - dep = {path = "../../dep2" } - "#, - ) - .file("m2/src/lib.rs", "extern crate dep;") - .build(); - - project() - .at("dep1") - .file("Cargo.toml", &basic_manifest("dep", "1.0.0")) - .file("src/lib.rs", "") - .build(); - - project() - .at("dep2") - .file("Cargo.toml", &basic_manifest("dep", "2.0.0")) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr_contains("[RUNNING] `rustc [..]dep1/src/lib.rs [..] -C codegen-units=1 [..]") - .with_stderr_contains("[RUNNING] `rustc [..]dep2/src/lib.rs [..] -C codegen-units=2 [..]") - .run(); -} - -#[cargo_test] -fn override_proc_macro() { - Package::new("shared", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - shared = "1.0" - pm = {path = "pm"} - - [profile.dev.build-override] - codegen-units = 4 - "#, - ) - .file("src/lib.rs", r#"pm::eat!{}"#) - .file( - "pm/Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - - [lib] - proc-macro = true - - [dependencies] - shared = "1.0" - "#, - ) - .file( - "pm/src/lib.rs", - r#" - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro] - pub fn eat(_item: TokenStream) -> TokenStream { - "".parse().unwrap() - } - "#, - ) - .build(); - - p.cargo("build -v") - // Shared built for the proc-macro. - .with_stderr_contains("[RUNNING] `rustc [..]--crate-name shared [..]-C codegen-units=4[..]") - // Shared built for the library. - .with_stderr_line_without( - &["[RUNNING] `rustc --crate-name shared"], - &["-C codegen-units"], - ) - .with_stderr_contains("[RUNNING] `rustc [..]--crate-name pm [..]-C codegen-units=4[..]") - .with_stderr_line_without( - &["[RUNNING] `rustc [..]--crate-name foo"], - &["-C codegen-units"], - ) - .run(); -} - -#[cargo_test] -fn no_warning_ws() { - // https://github.com/rust-lang/cargo/issues/7378, avoid warnings in a workspace. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - - [profile.dev.package.a] - codegen-units = 3 - "#, - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) - .file("a/src/lib.rs", "") - .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("build -p b") - .with_stderr( - "\ -[COMPILING] b [..] 
-[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_override_shared() { - // A dependency with a build script that is shared with a build - // dependency, using different profile settings. That is: - // - // foo DEBUG=2 - // โ”œโ”€โ”€ common DEBUG=2 - // โ”‚ โ””โ”€โ”€ common Run build.rs DEBUG=2 - // โ”‚ โ””โ”€โ”€ common build.rs DEBUG=0 (build_override) - // โ””โ”€โ”€ foo Run build.rs DEBUG=2 - // โ””โ”€โ”€ foo build.rs DEBUG=0 (build_override) - // โ””โ”€โ”€ common DEBUG=0 (build_override) - // โ””โ”€โ”€ common Run build.rs DEBUG=0 (build_override) - // โ””โ”€โ”€ common build.rs DEBUG=0 (build_override) - // - // The key part here is that `common` RunCustomBuild is run twice, once - // with DEBUG=2 (as a dependency of foo) and once with DEBUG=0 (as a - // build-dependency of foo's build script). - Package::new("common", "1.0.0") - .file( - "build.rs", - r#" - fn main() { - if std::env::var("DEBUG").unwrap() != "false" { - println!("cargo:rustc-cfg=foo_debug"); - } else { - println!("cargo:rustc-cfg=foo_release"); - } - } - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn foo() -> u32 { - if cfg!(foo_debug) { - assert!(cfg!(debug_assertions)); - 1 - } else if cfg!(foo_release) { - assert!(!cfg!(debug_assertions)); - 2 - } else { - panic!("not set"); - } - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [build-dependencies] - common = "1.0" - - [dependencies] - common = "1.0" - - [profile.dev.build-override] - debug = 0 - debug-assertions = false - "#, - ) - .file( - "build.rs", - r#" - fn main() { - assert_eq!(common::foo(), 2); - } - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - assert_eq!(common::foo(), 1); - } - "#, - ) - .build(); - - p.cargo("run").run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_targets.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_targets.rs deleted file mode 100644 index 74d54385f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profile_targets.rs +++ /dev/null @@ -1,658 +0,0 @@ -//! Tests for checking exactly how profiles correspond with each unit. For -//! example, the `test` profile applying to test targets, but not other -//! targets, etc. - -use cargo_test_support::{basic_manifest, project, Project}; - -fn all_target_project() -> Project { - // This abuses the `codegen-units` setting so that we can verify exactly - // which profile is used for each compiler invocation. 
- project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = { path = "bar" } - - [build-dependencies] - bdep = { path = "bdep" } - - [profile.dev] - codegen-units = 1 - panic = "abort" - [profile.release] - codegen-units = 2 - panic = "abort" - [profile.test] - codegen-units = 3 - [profile.bench] - codegen-units = 4 - [profile.dev.build-override] - codegen-units = 5 - [profile.release.build-override] - codegen-units = 6 - "#, - ) - .file("src/lib.rs", "extern crate bar;") - .file("src/main.rs", "extern crate foo; fn main() {}") - .file("examples/ex1.rs", "extern crate foo; fn main() {}") - .file("tests/test1.rs", "extern crate foo;") - .file("benches/bench1.rs", "extern crate foo;") - .file( - "build.rs", - r#" - extern crate bdep; - fn main() { - eprintln!("foo custom build PROFILE={} DEBUG={} OPT_LEVEL={}", - std::env::var("PROFILE").unwrap(), - std::env::var("DEBUG").unwrap(), - std::env::var("OPT_LEVEL").unwrap(), - ); - } - "#, - ) - // `bar` package. - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - // `bdep` package. - .file( - "bdep/Cargo.toml", - r#" - [package] - name = "bdep" - version = "0.0.1" - - [dependencies] - bar = { path = "../bar" } - "#, - ) - .file("bdep/src/lib.rs", "extern crate bar;") - .build() -} - -#[cargo_test] -fn profile_selection_build() { - let p = all_target_project(); - - // `build` - // NOTES: - // - bdep `panic` is not set because it thinks `build.rs` is a plugin. - // - build_script_build is built without panic because it thinks `build.rs` is a plugin. - p.cargo("build -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[COMPILING] bdep [..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] -[FINISHED] dev [unoptimized + debuginfo] [..] -").run(); - p.cargo("build -vv") - .with_stderr_unordered( - "\ -[FRESH] bar [..] -[FRESH] bdep [..] -[FRESH] foo [..] -[FINISHED] dev [unoptimized + debuginfo] [..] -", - ) - .run(); -} - -#[cargo_test] -fn profile_selection_build_release() { - let p = all_target_project(); - - // `build --release` - p.cargo("build --release -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] -[COMPILING] bdep [..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] 
-[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..] -[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[FINISHED] release [optimized] [..] -").run(); - p.cargo("build --release -vv") - .with_stderr_unordered( - "\ -[FRESH] bar [..] -[FRESH] bdep [..] -[FRESH] foo [..] -[FINISHED] release [optimized] [..] -", - ) - .run(); -} - -#[cargo_test] -fn profile_selection_build_all_targets() { - let p = all_target_project(); - // `build` - // NOTES: - // - bdep `panic` is not set because it thinks `build.rs` is a plugin. - // - build_script_build is built without panic because it thinks - // `build.rs` is a plugin. - // - Benchmark dependencies are compiled in `dev` mode, which may be - // surprising. See issue rust-lang/cargo#4929. - // - // - Dependency profiles: - // Pkg Target Profile Reason - // --- ------ ------- ------ - // bar lib dev For foo-bin - // bar lib dev-panic For tests/benches and bdep - // bdep lib dev-panic For foo build.rs - // foo custom dev-panic - // - // - `foo` target list is: - // Target Profile Mode - // ------ ------- ---- - // lib dev+panic build (a normal lib target) - // lib dev-panic build (used by tests/benches) - // lib dev dev - // test dev dev - // bench dev dev - // bin dev dev - // bin dev build - // example dev build - p.cargo("build --all-targets -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[COMPILING] bdep [..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]` -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]` -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 [..]` -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]` -[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]` -[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link[..]-C codegen-units=1 -C debuginfo=2 --test [..]` -[RUNNING] `[..] 
rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]` -[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..]` -[FINISHED] dev [unoptimized + debuginfo] [..] -").run(); - p.cargo("build -vv") - .with_stderr_unordered( - "\ -[FRESH] bar [..] -[FRESH] bdep [..] -[FRESH] foo [..] -[FINISHED] dev [unoptimized + debuginfo] [..] -", - ) - .run(); -} - -#[cargo_test] -fn profile_selection_build_all_targets_release() { - let p = all_target_project(); - // `build --all-targets --release` - // NOTES: - // - bdep `panic` is not set because it thinks `build.rs` is a plugin. - // - bar compiled twice. It tries with and without panic, but the "is a - // plugin" logic is forcing it to be cleared. - // - build_script_build is built without panic because it thinks - // `build.rs` is a plugin. - // - build_script_build is being run two times. Once for the `dev` and - // `test` targets, once for the `bench` targets. - // TODO: "PROFILE" says debug both times, though! - // - // - Dependency profiles: - // Pkg Target Profile Reason - // --- ------ ------- ------ - // bar lib release For foo-bin - // bar lib release-panic For tests/benches and bdep - // bdep lib release-panic For foo build.rs - // foo custom release-panic - // - // - `foo` target list is: - // Target Profile Mode - // ------ ------- ---- - // lib release+panic build (a normal lib target) - // lib release-panic build (used by tests/benches) - // lib release test (bench/test de-duped) - // test release test - // bench release test - // bin release test (bench/test de-duped) - // bin release build - // example release build - p.cargo("build --all-targets --release -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] -[COMPILING] bdep [..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] -[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..] -[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]` -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]` -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..]` -[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]` -[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]` -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..]` -[RUNNING] `[..] 
rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]` -[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..]` -[FINISHED] release [optimized] [..] -").run(); - p.cargo("build --all-targets --release -vv") - .with_stderr_unordered( - "\ -[FRESH] bar [..] -[FRESH] bdep [..] -[FRESH] foo [..] -[FINISHED] release [optimized] [..] -", - ) - .run(); -} - -#[cargo_test] -fn profile_selection_test() { - let p = all_target_project(); - // `test` - // NOTES: - // - Dependency profiles: - // Pkg Target Profile Reason - // --- ------ ------- ------ - // bar lib test For foo-bin - // bar lib test-panic For tests/benches and bdep - // bdep lib test-panic For foo build.rs - // foo custom test-panic - // - // - `foo` target list is: - // Target Profile Mode - // ------ ------- ---- - // lib test-panic build (for tests) - // lib test build (for bins) - // lib test test - // test test test - // example test-panic build - // bin test test - // bin test build - // - p.cargo("test -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=3 -C debuginfo=2 [..] -[COMPILING] bdep [..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[RUNNING] `[..]/target/debug/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C panic=abort[..]-C codegen-units=3 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]-C codegen-units=3 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C panic=abort[..]-C codegen-units=3 -C debuginfo=2 [..] -[FINISHED] test [unoptimized + debuginfo] [..] -[RUNNING] `[..]/deps/foo-[..]` -[RUNNING] `[..]/deps/foo-[..]` -[RUNNING] `[..]/deps/test1-[..]` -[DOCTEST] foo -[RUNNING] `rustdoc [..]--test [..] -").run(); - p.cargo("test -vv") - .with_stderr_unordered( - "\ -[FRESH] bar [..] -[FRESH] bdep [..] -[FRESH] foo [..] -[FINISHED] test [unoptimized + debuginfo] [..] -[RUNNING] `[..]/deps/foo-[..]` -[RUNNING] `[..]/deps/foo-[..]` -[RUNNING] `[..]/deps/test1-[..]` -[DOCTEST] foo -[RUNNING] `rustdoc [..]--test [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn profile_selection_test_release() { - let p = all_target_project(); - - // `test --release` - // NOTES: - // - Dependency profiles: - // Pkg Target Profile Reason - // --- ------ ------- ------ - // bar lib release For foo-bin - // bar lib release-panic For tests/benches and bdep - // bdep lib release-panic For foo build.rs - // foo custom release-panic - // - // - `foo` target list is: - // Target Profile Mode - // ------ ------- ---- - // lib release-panic build (for tests) - // lib release build (for bins) - // lib release test - // test release test - // example release-panic build - // bin release test - // bin release build - // - p.cargo("test --release -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C opt-level=3[..]-C codegen-units=2[..] -[COMPILING] bdep [..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] -[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..] -[RUNNING] `[..]/target/release/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..] -[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=2 --test [..] -[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]link -C opt-level=3[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[FINISHED] release [optimized] [..] -[RUNNING] `[..]/deps/foo-[..]` -[RUNNING] `[..]/deps/foo-[..]` -[RUNNING] `[..]/deps/test1-[..]` -[DOCTEST] foo -[RUNNING] `rustdoc [..]--test [..]` -").run(); - p.cargo("test --release -vv") - .with_stderr_unordered( - "\ -[FRESH] bar [..] -[FRESH] bdep [..] -[FRESH] foo [..] -[FINISHED] release [optimized] [..] -[RUNNING] `[..]/deps/foo-[..]` -[RUNNING] `[..]/deps/foo-[..]` -[RUNNING] `[..]/deps/test1-[..]` -[DOCTEST] foo -[RUNNING] `rustdoc [..]--test [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn profile_selection_bench() { - let p = all_target_project(); - - // `bench` - // NOTES: - // - Dependency profiles: - // Pkg Target Profile Reason - // --- ------ ------- ------ - // bar lib bench For foo-bin - // bar lib bench-panic For tests/benches and bdep - // bdep lib bench-panic For foo build.rs - // foo custom bench-panic - // - // - `foo` target list is: - // Target Profile Mode - // ------ ------- ---- - // lib bench-panic build (for benches) - // lib bench build (for bins) - // lib bench test(bench) - // bench bench test(bench) - // bin bench test(bench) - // bin bench build - // - p.cargo("bench -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=4 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=4 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] -[COMPILING] bdep [..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] -[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..] -[RUNNING] `[..]target/release/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=4 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link -C opt-level=3[..]-C codegen-units=4 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=4 --test [..] -[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=4 --test [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]link -C opt-level=3[..]-C codegen-units=4 --test [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]link -C opt-level=3 -C panic=abort[..]-C codegen-units=4 [..] -[FINISHED] bench [optimized] [..] -[RUNNING] `[..]/deps/foo-[..] --bench` -[RUNNING] `[..]/deps/foo-[..] --bench` -[RUNNING] `[..]/deps/bench1-[..] --bench` -").run(); - p.cargo("bench -vv") - .with_stderr_unordered( - "\ -[FRESH] bar [..] -[FRESH] bdep [..] -[FRESH] foo [..] -[FINISHED] bench [optimized] [..] -[RUNNING] `[..]/deps/foo-[..] --bench` -[RUNNING] `[..]/deps/foo-[..] --bench` -[RUNNING] `[..]/deps/bench1-[..] --bench` -", - ) - .run(); -} - -#[cargo_test] -fn profile_selection_check_all_targets() { - let p = all_target_project(); - // `check` - // NOTES: - // - Dependency profiles: - // Pkg Target Profile Action Reason - // --- ------ ------- ------ ------ - // bar lib dev* link For bdep - // bar lib dev-panic metadata For tests/benches - // bar lib dev metadata For lib/bins - // bdep lib dev* link For foo build.rs - // foo custom dev* link For build.rs - // - // `*` = wants panic, but it is cleared when args are built. 
- // - // - foo target list is: - // Target Profile Mode - // ------ ------- ---- - // lib dev check - // lib dev-panic check (for tests/benches) - // lib dev-panic check-test (checking lib as a unittest) - // example dev check - // test dev-panic check-test - // bench dev-panic check-test - // bin dev check - // bin dev-panic check-test (checking bin as a unittest) - // - p.cargo("check --all-targets -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] -[COMPILING] bdep[..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata[..]-C codegen-units=1 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] -[FINISHED] dev [unoptimized + debuginfo] [..] -").run(); - // Starting with Rust 1.27, rustc emits `rmeta` files for bins, so - // everything should be completely fresh. Previously, bins were being - // rechecked. - // See PR rust-lang/rust#49289 and issue rust-lang/cargo#3624. - p.cargo("check --all-targets -vv") - .with_stderr_unordered( - "\ -[FRESH] bar [..] -[FRESH] bdep [..] -[FRESH] foo [..] -[FINISHED] dev [unoptimized + debuginfo] [..] -", - ) - .run(); -} - -#[cargo_test] -fn profile_selection_check_all_targets_release() { - let p = all_target_project(); - // `check --release` - // See issue rust-lang/cargo#5218. - // This is a pretty straightforward variant of - // `profile_selection_check_all_targets` that uses `release` instead of - // `dev` for all targets. - p.cargo("check --all-targets --release -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=6 [..] -[RUNNING] `[..] 
rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[COMPILING] bdep[..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link [..]-C codegen-units=6 [..] -[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=6 [..] -[RUNNING] `[..]target/release/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=release DEBUG=false OPT_LEVEL=3 -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..] -[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..] -[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata -C opt-level=3[..]-C codegen-units=2 --test [..] -[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--crate-type bin --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--crate-type bin --emit=[..]metadata -C opt-level=3 -C panic=abort[..]-C codegen-units=2 [..] -[FINISHED] release [optimized] [..] -").run(); - - p.cargo("check --all-targets --release -vv") - .with_stderr_unordered( - "\ -[FRESH] bar [..] -[FRESH] bdep [..] -[FRESH] foo [..] -[FINISHED] release [optimized] [..] -", - ) - .run(); -} - -#[cargo_test] -fn profile_selection_check_all_targets_test() { - let p = all_target_project(); - - // `check --profile=test` - // - Dependency profiles: - // Pkg Target Profile Action Reason - // --- ------ ------- ------ ------ - // bar lib test* link For bdep - // bar lib test-panic metdata For tests/benches - // bdep lib test* link For foo build.rs - // foo custom test* link For build.rs - // - // `*` = wants panic, but it is cleared when args are built. - // - // - foo target list is: - // Target Profile Mode - // ------ ------- ---- - // lib test-panic check-test (for tests/benches) - // lib test-panic check-test (checking lib as a unittest) - // example test-panic check-test - // test test-panic check-test - // bench test-panic check-test - // bin test-panic check-test - // - p.cargo("check --all-targets --profile=test -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 [..] -[COMPILING] bdep[..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] 
-[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 [..] -[RUNNING] `[..] rustc --crate-name foo src/lib.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name test1 tests/test1.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name foo src/main.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name bench1 benches/bench1.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..] -[RUNNING] `[..] rustc --crate-name ex1 examples/ex1.rs [..]--emit=[..]metadata[..]-C codegen-units=3 -C debuginfo=2 --test [..] -[FINISHED] test [unoptimized + debuginfo] [..] -").run(); - - p.cargo("check --all-targets --profile=test -vv") - .with_stderr_unordered( - "\ -[FRESH] bar [..] -[FRESH] bdep [..] -[FRESH] foo [..] -[FINISHED] test [unoptimized + debuginfo] [..] -", - ) - .run(); -} - -#[cargo_test] -fn profile_selection_doc() { - let p = all_target_project(); - // `doc` - // NOTES: - // - Dependency profiles: - // Pkg Target Profile Action Reason - // --- ------ ------- ------ ------ - // bar lib dev* link For bdep - // bar lib dev metadata For rustdoc - // bdep lib dev* link For foo build.rs - // foo custom dev* link For build.rs - // - // `*` = wants panic, but it is cleared when args are built. - p.cargo("doc -vv").with_stderr_unordered("\ -[COMPILING] bar [..] -[DOCUMENTING] bar [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[RUNNING] `rustdoc [..]--crate-name bar bar/src/lib.rs [..] -[RUNNING] `[..] rustc --crate-name bar bar/src/lib.rs [..]--crate-type lib --emit=[..]metadata -C panic=abort[..]-C codegen-units=1 -C debuginfo=2 [..] -[COMPILING] bdep [..] -[RUNNING] `[..] rustc --crate-name bdep bdep/src/lib.rs [..]--crate-type lib --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[COMPILING] foo [..] -[RUNNING] `[..] rustc --crate-name build_script_build build.rs [..]--crate-type bin --emit=[..]link[..]-C codegen-units=5 -C debuginfo=2 [..] -[RUNNING] `[..]target/debug/build/foo-[..]/build-script-build` -[foo 0.0.1] foo custom build PROFILE=debug DEBUG=true OPT_LEVEL=0 -[DOCUMENTING] foo [..] -[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..] -[FINISHED] dev [unoptimized + debuginfo] [..] -").run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profiles.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profiles.rs deleted file mode 100644 index 4f5ed7664..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/profiles.rs +++ /dev/null @@ -1,597 +0,0 @@ -//! Tests for profiles. - -use std::env; - -use cargo_test_support::{is_nightly, project}; - -#[cargo_test] -fn profile_overrides() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [profile.dev] - opt-level = 1 - debug = false - rpath = true - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] test v0.0.0 ([CWD]) -[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]\ - -C opt-level=1[..]\ - -C debug-assertions=on \ - -C metadata=[..] 
\ - -C rpath \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps` -[FINISHED] dev [optimized] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn opt_level_override_0() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [profile.dev] - opt-level = 0 - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] test v0.0.0 ([CWD]) -[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]\ - -C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps` -[FINISHED] [..] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn debug_override_1() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.0.0" - authors = [] - - [profile.dev] - debug = 1 - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] test v0.0.0 ([CWD]) -[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]\ - -C debuginfo=1 \ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps` -[FINISHED] [..] target(s) in [..] -", - ) - .run(); -} - -fn check_opt_level_override(profile_level: &str, rustc_level: &str) { - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [profile.dev] - opt-level = {level} - "#, - level = profile_level - ), - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build -v") - .with_stderr(&format!( - "\ -[COMPILING] test v0.0.0 ([CWD]) -[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ - --emit=[..]link \ - -C opt-level={level}[..]\ - -C debuginfo=2 \ - -C debug-assertions=on \ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps` -[FINISHED] [..] target(s) in [..] -", - level = rustc_level - )) - .run(); -} - -#[cargo_test] -fn opt_level_overrides() { - for &(profile_level, rustc_level) in &[ - ("1", "1"), - ("2", "2"), - ("3", "3"), - ("\"s\"", "s"), - ("\"z\"", "z"), - ] { - check_opt_level_override(profile_level, rustc_level) - } -} - -#[cargo_test] -fn top_level_overrides_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - - name = "test" - version = "0.0.0" - authors = [] - - [profile.release] - opt-level = 1 - debug = true - - [dependencies.foo] - path = "foo" - "#, - ) - .file("src/lib.rs", "") - .file( - "foo/Cargo.toml", - r#" - [package] - - name = "foo" - version = "0.0.0" - authors = [] - - [profile.release] - opt-level = 0 - debug = false - - [lib] - name = "foo" - crate_type = ["dylib", "rlib"] - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - p.cargo("build -v --release") - .with_stderr(&format!( - "\ -[COMPILING] foo v0.0.0 ([CWD]/foo) -[RUNNING] `rustc --crate-name foo foo/src/lib.rs [..]\ - --crate-type dylib --crate-type rlib \ - --emit=[..]link \ - -C prefer-dynamic \ - -C opt-level=1[..]\ - -C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [CWD]/target/release/deps \ - -L dependency=[CWD]/target/release/deps` -[COMPILING] test v0.0.0 ([CWD]) -[RUNNING] `rustc --crate-name test src/lib.rs [..]--crate-type lib \ - --emit=[..]link \ - -C opt-level=1[..]\ - -C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [..] 
\ - -L dependency=[CWD]/target/release/deps \ - --extern foo=[CWD]/target/release/deps/\ - {prefix}foo[..]{suffix} \ - --extern foo=[CWD]/target/release/deps/libfoo.rlib` -[FINISHED] release [optimized + debuginfo] target(s) in [..] -", - prefix = env::consts::DLL_PREFIX, - suffix = env::consts::DLL_SUFFIX - )) - .run(); -} - -#[cargo_test] -fn profile_in_non_root_manifest_triggers_a_warning() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - - [profile.dev] - debug = false - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = ".." - - [profile.dev] - opt-level = 1 - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v") - .cwd("bar") - .with_stderr( - "\ -[WARNING] profiles for the non root package will be ignored, specify profiles at the workspace root: -package: [..] -workspace: [..] -[COMPILING] bar v0.1.0 ([..]) -[RUNNING] `rustc [..]` -[FINISHED] dev [unoptimized] target(s) in [..]", - ) - .run(); -} - -#[cargo_test] -fn profile_in_virtual_manifest_works() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - - [profile.dev] - opt-level = 1 - debug = false - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = ".." - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v") - .cwd("bar") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 ([..]) -[RUNNING] `rustc [..]` -[FINISHED] dev [optimized] target(s) in [..]", - ) - .run(); -} - -#[cargo_test] -fn profile_panic_test_bench() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [profile.test] - panic = "abort" - - [profile.bench] - panic = "abort" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr_contains( - "\ -[WARNING] `panic` setting is ignored for `bench` profile -[WARNING] `panic` setting is ignored for `test` profile -", - ) - .run(); -} - -#[cargo_test] -fn profile_doc_deprecated() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [profile.doc] - opt-level = 0 - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr_contains("[WARNING] profile `doc` is deprecated and has no effect") - .run(); -} - -#[cargo_test] -fn panic_unwind_does_not_build_twice() { - // Check for a bug where `lib` was built twice, once with panic set and - // once without. Since "unwind" is the default, they are the same and - // should only be built once. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.dev] - panic = "unwind" - "#, - ) - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .file("tests/t1.rs", "") - .build(); - - p.cargo("test -v --tests --no-run") - .with_stderr_unordered( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..] -[RUNNING] `rustc --crate-name foo src/lib.rs [..] --test [..] -[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin [..] -[RUNNING] `rustc --crate-name foo src/main.rs [..] --test [..] -[RUNNING] `rustc --crate-name t1 tests/t1.rs [..] -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn debug_0_report() { - // The finished line handles 0 correctly. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.dev] - debug = 0 - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.1.0 [..] -[RUNNING] `rustc --crate-name foo src/lib.rs [..]-C debuginfo=0 [..] -[FINISHED] dev [unoptimized] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn thin_lto_works() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "top" - version = "0.5.0" - authors = [] - - [profile.release] - lto = 'thin' - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --release -v") - .with_stderr( - "\ -[COMPILING] top [..] -[RUNNING] `rustc [..] -C lto=thin [..]` -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn strip_works() { - if !is_nightly() { - // rustc 1.58 stabilized -C strip; disable the test until that ships. - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.release] - strip = 'symbols' - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --release -v") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc [..] -C strip=symbols [..]` -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn strip_passes_unknown_option_to_rustc() { - if !is_nightly() { - // rustc 1.58 stabilized -C strip; disable the test until that ships. - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.release] - strip = 'unknown' - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --release -v") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc [..] -C strip=unknown [..]` -error: incorrect value `unknown` for [..] `strip` [..] was expected -", - ) - .run(); -} - -#[cargo_test] -fn strip_accepts_true_to_strip_symbols() { - if !is_nightly() { - // rustc 1.58 stabilized -C strip; disable the test until that ships. - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.release] - strip = true - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --release -v") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc [..] -C strip=symbols [..]` -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn strip_accepts_false_to_disable_strip() { - if !is_nightly() { - // rustc 1.58 stabilized -C strip; disable the test until that ships. - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [profile.release] - strip = false - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --release -v") - .with_stderr_does_not_contain("-C strip") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/progress.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/progress.rs deleted file mode 100644 index f5845d2e0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/progress.rs +++ /dev/null @@ -1,159 +0,0 @@ -//! Tests for progress bar. 
- -use cargo_test_support::project; -use cargo_test_support::registry::Package; - -#[cargo_test] -fn bad_progress_config_unknown_when() { - let p = project() - .file( - ".cargo/config", - r#" - [term] - progress = { when = 'unknown' } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] error in [..].cargo/config: \ -could not load config key `term.progress.when` - -Caused by: - unknown variant `unknown`, expected one of `auto`, `never`, `always` -", - ) - .run(); -} - -#[cargo_test] -fn bad_progress_config_missing_width() { - let p = project() - .file( - ".cargo/config", - r#" - [term] - progress = { when = 'always' } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] \"always\" progress requires a `width` key -", - ) - .run(); -} - -#[cargo_test] -fn bad_progress_config_missing_when() { - let p = project() - .file( - ".cargo/config", - r#" - [term] - progress = { width = 1000 } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: missing field `when` -", - ) - .run(); -} - -#[cargo_test] -fn always_shows_progress() { - const N: usize = 3; - let mut deps = String::new(); - for i in 1..=N { - Package::new(&format!("dep{}", i), "1.0.0").publish(); - deps.push_str(&format!("dep{} = \"1.0\"\n", i)); - } - - let p = project() - .file( - ".cargo/config", - r#" - [term] - progress = { when = 'always', width = 100 } - "#, - ) - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - {} - "#, - deps - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr_contains("[DOWNLOADING] [..] crates [..]") - .with_stderr_contains("[..][DOWNLOADED] 3 crates ([..]) in [..]") - .with_stderr_contains("[BUILDING] [..] [..]/4: [..]") - .run(); -} - -#[cargo_test] -fn never_progress() { - const N: usize = 3; - let mut deps = String::new(); - for i in 1..=N { - Package::new(&format!("dep{}", i), "1.0.0").publish(); - deps.push_str(&format!("dep{} = \"1.0\"\n", i)); - } - - let p = project() - .file( - ".cargo/config", - r#" - [term] - progress = { when = 'never' } - "#, - ) - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - {} - "#, - deps - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr_does_not_contain("[DOWNLOADING] [..] crates [..]") - .with_stderr_does_not_contain("[..][DOWNLOADED] 3 crates ([..]) in [..]") - .with_stderr_does_not_contain("[BUILDING] [..] [..]/4: [..]") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/pub_priv.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/pub_priv.rs deleted file mode 100644 index 781716bb2..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/pub_priv.rs +++ /dev/null @@ -1,207 +0,0 @@ -//! Tests for public/private dependencies. 
- -use cargo_test_support::registry::Package; -use cargo_test_support::{is_nightly, project}; - -#[cargo_test] -fn exported_priv_warning() { - if !is_nightly() { - // exported_private_dependencies lint is unstable - return; - } - Package::new("priv_dep", "0.1.0") - .file("src/lib.rs", "pub struct FromPriv;") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["public-dependency"] - - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - priv_dep = "0.1.0" - "#, - ) - .file( - "src/lib.rs", - " - extern crate priv_dep; - pub fn use_priv(_: priv_dep::FromPriv) {} - ", - ) - .build(); - - p.cargo("build --message-format=short") - .masquerade_as_nightly_cargo() - .with_stderr_contains( - "\ -src/lib.rs:3:13: warning: type `[..]FromPriv` from private dependency 'priv_dep' in public interface -", - ) - .run() -} - -#[cargo_test] -fn exported_pub_dep() { - if !is_nightly() { - // exported_private_dependencies lint is unstable - return; - } - Package::new("pub_dep", "0.1.0") - .file("src/lib.rs", "pub struct FromPub;") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["public-dependency"] - - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - pub_dep = {version = "0.1.0", public = true} - "#, - ) - .file( - "src/lib.rs", - " - extern crate pub_dep; - pub fn use_pub(_: pub_dep::FromPub) {} - ", - ) - .build(); - - p.cargo("build --message-format=short") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] pub_dep v0.1.0 ([..]) -[COMPILING] pub_dep v0.1.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run() -} - -#[cargo_test] -pub fn requires_nightly_cargo() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["public-dependency"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build --message-format=short") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - the cargo feature `public-dependency` requires a nightly version of Cargo, but this is the `stable` channel - See https://doc.rust-lang.org/book/appendix-07-nightly-rust.html for more information about Rust release channels. - See https://doc.rust-lang.org/[..]cargo/reference/unstable.html#public-dependency for more information about using this feature. -" - ) - .run() -} - -#[cargo_test] -fn requires_feature() { - Package::new("pub_dep", "0.1.0") - .file("src/lib.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - pub_dep = { version = "0.1.0", public = true } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build --message-format=short") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - feature `public-dependency` is required - - The package requires the Cargo feature called `public-dependency`, \ - but that feature is not stabilized in this version of Cargo (1.[..]). - Consider adding `cargo-features = [\"public-dependency\"]` to the top of Cargo.toml \ - (above the [package] table) to tell Cargo you are opting in to use this unstable feature. - See https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#public-dependency \ - for more information about the status of this feature. 
-", - ) - .run() -} - -#[cargo_test] -fn pub_dev_dependency() { - Package::new("pub_dep", "0.1.0") - .file("src/lib.rs", "pub struct FromPub;") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["public-dependency"] - - [package] - name = "foo" - version = "0.0.1" - - [dev-dependencies] - pub_dep = {version = "0.1.0", public = true} - "#, - ) - .file( - "src/lib.rs", - " - extern crate pub_dep; - pub fn use_pub(_: pub_dep::FromPub) {} - ", - ) - .build(); - - p.cargo("build --message-format=short") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - 'public' specifier can only be used on regular dependencies, not Development dependencies -", - ) - .run() -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/publish.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/publish.rs deleted file mode 100644 index 8085259fb..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/publish.rs +++ /dev/null @@ -1,1732 +0,0 @@ -//! Tests for the `cargo publish` command. - -use cargo_test_support::git::{self, repo}; -use cargo_test_support::paths; -use cargo_test_support::registry::{self, registry_url, Package}; -use cargo_test_support::{basic_manifest, no_such_file_err_msg, project, publish}; -use std::fs; - -const CLEAN_FOO_JSON: &str = r#" - { - "authors": [], - "badges": {}, - "categories": [], - "deps": [], - "description": "foo", - "documentation": "foo", - "features": {}, - "homepage": "foo", - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": "foo", - "vers": "0.0.1" - } -"#; - -fn validate_upload_foo() { - publish::validate_upload( - r#" - { - "authors": [], - "badges": {}, - "categories": [], - "deps": [], - "description": "foo", - "documentation": null, - "features": {}, - "homepage": null, - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": null, - "vers": "0.0.1" - } - "#, - "foo-0.0.1.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - ); -} - -fn validate_upload_bar() { - publish::validate_upload( - r#" - { - "authors": [], - "badges": {}, - "categories": [], - "deps": [], - "description": "bar", - "documentation": null, - "features": {}, - "homepage": null, - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "name": "bar", - "readme": null, - "readme_file": null, - "repository": null, - "vers": "0.0.1" - } - "#, - "bar-0.0.1.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - ); -} - -fn validate_upload_foo_clean() { - publish::validate_upload( - CLEAN_FOO_JSON, - "foo-0.0.1.crate", - &[ - "Cargo.lock", - "Cargo.toml", - "Cargo.toml.orig", - "src/main.rs", - ".cargo_vcs_info.json", - ], - ); -} - -#[cargo_test] -fn simple() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --no-verify --token sekrit") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[WARNING] manifest has no documentation, [..] -See [..] 
-[PACKAGING] foo v0.0.1 ([CWD]) -[UPLOADING] foo v0.0.1 ([CWD]) -", - ) - .run(); - - validate_upload_foo(); -} - -#[cargo_test] -fn old_token_location() { - // Check that the `token` key works at the root instead of under a - // `[registry]` table. - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - let credentials = paths::home().join(".cargo/credentials"); - fs::remove_file(&credentials).unwrap(); - - // Verify can't publish without a token. - p.cargo("publish --no-verify") - .with_status(101) - .with_stderr_contains( - "[ERROR] no upload token found, \ - please run `cargo login` or pass `--token`", - ) - .run(); - - fs::write(&credentials, r#"token = "api-token""#).unwrap(); - - p.cargo("publish --no-verify") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[WARNING] using `registry.token` config value with source replacement is deprecated -This may become a hard error in the future[..] -Use the --token command-line flag to remove this warning. -[WARNING] manifest has no documentation, [..] -See [..] -[PACKAGING] foo v0.0.1 ([CWD]) -[UPLOADING] foo v0.0.1 ([CWD]) -", - ) - .run(); - - validate_upload_foo(); -} - -#[cargo_test] -fn simple_with_index() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --no-verify --token sekrit --index") - .arg(registry_url().to_string()) - .run(); - - validate_upload_foo(); -} - -#[cargo_test] -fn git_deps() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [dependencies.foo] - git = "git://path/to/nowhere" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish -v --no-verify --token sekrit") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -[ERROR] all dependencies must have a version specified when publishing. -dependency `foo` does not specify a version -Note: The published dependency will use the version from crates.io, -the `git` specification will be removed from the dependency declaration. -", - ) - .run(); -} - -#[cargo_test] -fn path_dependency_no_version() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("publish --token sekrit") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -[ERROR] all dependencies must have a version specified when publishing. -dependency `bar` does not specify a version -Note: The published dependency will use the version from crates.io, -the `path` specification will be removed from the dependency declaration. 
-", - ) - .run(); -} - -#[cargo_test] -fn unpublishable_crate() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - publish = false - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --index") - .arg(registry_url().to_string()) - .with_status(101) - .with_stderr( - "\ -[ERROR] `foo` cannot be published. -The registry `crates-io` is not listed in the `publish` value in Cargo.toml. -", - ) - .run(); -} - -#[cargo_test] -fn dont_publish_dirty() { - registry::init(); - let p = project().file("bar", "").build(); - - let _ = git::repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --token sekrit") - .with_status(101) - .with_stderr( - "\ -[UPDATING] `[..]` index -error: 1 files in the working directory contain changes that were not yet \ -committed into git: - -bar - -to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag -", - ) - .run(); -} - -#[cargo_test] -fn publish_clean() { - registry::init(); - - let p = project().build(); - - let _ = repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --token sekrit").run(); - - validate_upload_foo_clean(); -} - -#[cargo_test] -fn publish_in_sub_repo() { - registry::init(); - - let p = project().no_manifest().file("baz", "").build(); - - let _ = repo(&paths::root().join("foo")) - .file( - "bar/Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --token sekrit").cwd("bar").run(); - - validate_upload_foo_clean(); -} - -#[cargo_test] -fn publish_when_ignored() { - registry::init(); - - let p = project().file("baz", "").build(); - - let _ = repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file(".gitignore", "baz") - .build(); - - p.cargo("publish --token sekrit").run(); - - publish::validate_upload( - CLEAN_FOO_JSON, - "foo-0.0.1.crate", - &[ - "Cargo.lock", - "Cargo.toml", - "Cargo.toml.orig", - "src/main.rs", - ".gitignore", - ".cargo_vcs_info.json", - ], - ); -} - -#[cargo_test] -fn ignore_when_crate_ignored() { - registry::init(); - - let p = project().no_manifest().file("bar/baz", "").build(); - - let _ = repo(&paths::root().join("foo")) - .file(".gitignore", "bar") - .nocommit_file( - "bar/Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .nocommit_file("bar/src/main.rs", "fn main() {}"); - p.cargo("publish --token sekrit").cwd("bar").run(); - - 
publish::validate_upload( - CLEAN_FOO_JSON, - "foo-0.0.1.crate", - &[ - "Cargo.lock", - "Cargo.toml", - "Cargo.toml.orig", - "src/main.rs", - "baz", - ], - ); -} - -#[cargo_test] -fn new_crate_rejected() { - registry::init(); - - let p = project().file("baz", "").build(); - - let _ = repo(&paths::root().join("foo")) - .nocommit_file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .nocommit_file("src/main.rs", "fn main() {}"); - p.cargo("publish --token sekrit") - .with_status(101) - .with_stderr_contains( - "[ERROR] 3 files in the working directory contain \ - changes that were not yet committed into git:", - ) - .run(); -} - -#[cargo_test] -fn dry_run() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --dry-run --index") - .arg(registry_url().to_string()) - .with_stderr( - "\ -[UPDATING] `[..]` index -[WARNING] manifest has no documentation, [..] -See [..] -[PACKAGING] foo v0.0.1 ([CWD]) -[VERIFYING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[UPLOADING] foo v0.0.1 ([CWD]) -[WARNING] aborting upload due to dry run -", - ) - .run(); - - // Ensure the API request wasn't actually made - assert!(registry::api_path().join("api/v1/crates").exists()); - assert!(!registry::api_path().join("api/v1/crates/new").exists()); -} - -#[cargo_test] -fn registry_not_in_publish_list() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - publish = [ - "test" - ] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish") - .arg("--registry") - .arg("alternative") - .with_status(101) - .with_stderr( - "\ -[ERROR] `foo` cannot be published. -The registry `alternative` is not listed in the `publish` value in Cargo.toml. -", - ) - .run(); -} - -#[cargo_test] -fn publish_empty_list() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - publish = [] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --registry alternative") - .with_status(101) - .with_stderr( - "\ -[ERROR] `foo` cannot be published. -The registry `alternative` is not listed in the `publish` value in Cargo.toml. 
-", - ) - .run(); -} - -#[cargo_test] -fn publish_allowed_registry() { - registry::alt_init(); - - let p = project().build(); - - let _ = repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - publish = ["alternative"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --registry alternative").run(); - - publish::validate_alt_upload( - CLEAN_FOO_JSON, - "foo-0.0.1.crate", - &[ - "Cargo.lock", - "Cargo.toml", - "Cargo.toml.orig", - "src/main.rs", - ".cargo_vcs_info.json", - ], - ); -} - -#[cargo_test] -fn publish_implicitly_to_only_allowed_registry() { - registry::alt_init(); - - let p = project().build(); - - let _ = repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - publish = ["alternative"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish").run(); - - publish::validate_alt_upload( - CLEAN_FOO_JSON, - "foo-0.0.1.crate", - &[ - "Cargo.lock", - "Cargo.toml", - "Cargo.toml.orig", - "src/main.rs", - ".cargo_vcs_info.json", - ], - ); -} - -#[cargo_test] -fn publish_fail_with_no_registry_specified() { - registry::init(); - - let p = project().build(); - - let _ = repo(&paths::root().join("foo")) - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - publish = ["alternative", "test"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish") - .with_status(101) - .with_stderr( - "\ -[ERROR] `foo` cannot be published. -The registry `crates-io` is not listed in the `publish` value in Cargo.toml. -", - ) - .run(); -} - -#[cargo_test] -fn block_publish_no_registry() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - publish = [] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --registry alternative") - .with_status(101) - .with_stderr( - "\ -[ERROR] `foo` cannot be published. -The registry `alternative` is not listed in the `publish` value in Cargo.toml. -", - ) - .run(); -} - -#[cargo_test] -fn publish_with_crates_io_explicit() { - // Explicitly setting `crates-io` in the publish list. - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - publish = ["crates-io"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --registry alternative") - .with_status(101) - .with_stderr( - "\ -[ERROR] `foo` cannot be published. -The registry `alternative` is not listed in the `publish` value in Cargo.toml. 
-", - ) - .run(); - - p.cargo("publish").run(); -} - -#[cargo_test] -fn publish_with_select_features() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [features] - required = [] - optional = [] - "#, - ) - .file( - "src/main.rs", - "#[cfg(not(feature = \"required\"))] - compile_error!(\"This crate requires `required` feature!\"); - fn main() {}", - ) - .build(); - - p.cargo("publish --features required --token sekrit") - .with_stderr_contains("[UPLOADING] foo v0.0.1 ([CWD])") - .run(); -} - -#[cargo_test] -fn publish_with_all_features() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [features] - required = [] - optional = [] - "#, - ) - .file( - "src/main.rs", - "#[cfg(not(feature = \"required\"))] - compile_error!(\"This crate requires `required` feature!\"); - fn main() {}", - ) - .build(); - - p.cargo("publish --all-features --token sekrit") - .with_stderr_contains("[UPLOADING] foo v0.0.1 ([CWD])") - .run(); -} - -#[cargo_test] -fn publish_with_no_default_features() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - - [features] - default = ["required"] - required = [] - "#, - ) - .file( - "src/main.rs", - "#[cfg(not(feature = \"required\"))] - compile_error!(\"This crate requires `required` feature!\"); - fn main() {}", - ) - .build(); - - p.cargo("publish --no-default-features --token sekrit") - .with_stderr_contains("error: This crate requires `required` feature!") - .with_status(101) - .run(); -} - -#[cargo_test] -fn publish_with_patch() { - Package::new("bar", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - [dependencies] - bar = "1.0" - [patch.crates-io] - bar = { path = "bar" } - "#, - ) - .file( - "src/main.rs", - "extern crate bar; - fn main() { - bar::newfunc(); - }", - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "1.0.0")) - .file("bar/src/lib.rs", "pub fn newfunc() {}") - .build(); - - // Check that it works with the patched crate. - p.cargo("build").run(); - - // Check that verify fails with patched crate which has new functionality. - p.cargo("publish --token sekrit") - .with_stderr_contains("[..]newfunc[..]") - .with_status(101) - .run(); - - // Remove the usage of new functionality and try again. - p.change_file("src/main.rs", "extern crate bar; pub fn main() {}"); - - p.cargo("publish --token sekrit").run(); - - // Note, use of `registry` in the deps here is an artifact that this - // publishes to a fake, local registry that is pretending to be crates.io. - // Normal publishes would set it to null. 
- publish::validate_upload( - r#" - { - "authors": [], - "badges": {}, - "categories": [], - "deps": [ - { - "default_features": true, - "features": [], - "kind": "normal", - "name": "bar", - "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", - "target": null, - "version_req": "^1.0" - } - ], - "description": "foo", - "documentation": null, - "features": {}, - "homepage": null, - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": null, - "vers": "0.0.1" - } - "#, - "foo-0.0.1.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - ); -} - -#[cargo_test] -fn publish_checks_for_token_before_verify() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - let credentials = paths::home().join(".cargo/credentials"); - fs::remove_file(&credentials).unwrap(); - - // Assert upload token error before the package is verified - p.cargo("publish") - .with_status(101) - .with_stderr_contains( - "[ERROR] no upload token found, \ - please run `cargo login` or pass `--token`", - ) - .with_stderr_does_not_contain("[VERIFYING] foo v0.0.1 ([CWD])") - .run(); - - // Assert package verified successfully on dry run - p.cargo("publish --dry-run") - .with_status(0) - .with_stderr_contains("[VERIFYING] foo v0.0.1 ([CWD])") - .run(); -} - -#[cargo_test] -fn publish_with_bad_source() { - let p = project() - .file( - ".cargo/config", - r#" - [source.crates-io] - replace-with = 'local-registry' - - [source.local-registry] - local-registry = 'registry' - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --token sekrit") - .with_status(101) - .with_stderr( - "\ -[ERROR] registry `[..]/foo/registry` does not support API commands. -Check for a source-replacement in .cargo/config. -", - ) - .run(); - - p.change_file( - ".cargo/config", - r#" - [source.crates-io] - replace-with = "vendored-sources" - - [source.vendored-sources] - directory = "vendor" - "#, - ); - - p.cargo("publish --token sekrit") - .with_status(101) - .with_stderr( - "\ -[ERROR] dir [..]/foo/vendor does not support API commands. -Check for a source-replacement in .cargo/config. -", - ) - .run(); -} - -#[cargo_test] -fn publish_git_with_version() { - // A dependency with both `git` and `version`. 
- Package::new("dep1", "1.0.1") - .file("src/lib.rs", "pub fn f() -> i32 {1}") - .publish(); - - let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_manifest("dep1", "1.0.0")) - .file("src/lib.rs", "pub fn f() -> i32 {2}") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - edition = "2018" - license = "MIT" - description = "foo" - - [dependencies] - dep1 = {{version = "1.0", git="{}"}} - "#, - git_project.url() - ), - ) - .file( - "src/main.rs", - r#" - pub fn main() { - println!("{}", dep1::f()); - } - "#, - ) - .build(); - - p.cargo("run").with_stdout("2").run(); - p.cargo("publish --no-verify --token sekrit").run(); - - publish::validate_upload_with_contents( - r#" - { - "authors": [], - "badges": {}, - "categories": [], - "deps": [ - { - "default_features": true, - "features": [], - "kind": "normal", - "name": "dep1", - "optional": false, - "registry": "https://github.com/rust-lang/crates.io-index", - "target": null, - "version_req": "^1.0" - } - ], - "description": "foo", - "documentation": null, - "features": {}, - "homepage": null, - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": null, - "vers": "0.1.0" - } - "#, - "foo-0.1.0.crate", - &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], - &[ - ( - "Cargo.toml", - // Check that only `version` is included in Cargo.toml. - &format!( - "{}\n\ - [package]\n\ - edition = \"2018\"\n\ - name = \"foo\"\n\ - version = \"0.1.0\"\n\ - authors = []\n\ - description = \"foo\"\n\ - license = \"MIT\"\n\ - [dependencies.dep1]\n\ - version = \"1.0\"\n\ - ", - cargo::core::package::MANIFEST_PREAMBLE - ), - ), - ( - "Cargo.lock", - // The important check here is that it is 1.0.1 in the registry. - "# This file is automatically @generated by Cargo.\n\ - # It is not intended for manual editing.\n\ - version = 3\n\ - \n\ - [[package]]\n\ - name = \"dep1\"\n\ - version = \"1.0.1\"\n\ - source = \"registry+https://github.com/rust-lang/crates.io-index\"\n\ - checksum = \"[..]\"\n\ - \n\ - [[package]]\n\ - name = \"foo\"\n\ - version = \"0.1.0\"\n\ - dependencies = [\n\ - \x20\"dep1\",\n\ - ]\n\ - ", - ), - ], - ); -} - -#[cargo_test] -fn publish_dev_dep_no_version() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - - [dev-dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify --token sekrit") - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.1.0 [..] -[UPLOADING] foo v0.1.0 [..] 
-", - ) - .run(); - - publish::validate_upload_with_contents( - r#" - { - "authors": [], - "badges": {}, - "categories": [], - "deps": [], - "description": "foo", - "documentation": "foo", - "features": {}, - "homepage": "foo", - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": "foo", - "vers": "0.1.0" - } - "#, - "foo-0.1.0.crate", - &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], - &[( - "Cargo.toml", - &format!( - r#"{} -[package] -name = "foo" -version = "0.1.0" -authors = [] -description = "foo" -homepage = "foo" -documentation = "foo" -license = "MIT" -repository = "foo" - -[dev-dependencies] -"#, - cargo::core::package::MANIFEST_PREAMBLE - ), - )], - ); -} - -#[cargo_test] -fn credentials_ambiguous_filename() { - registry::init(); - - let credentials_toml = paths::home().join(".cargo/credentials.toml"); - fs::write(credentials_toml, r#"token = "api-token""#).unwrap(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --no-verify --token sekrit") - .with_stderr_contains( - "\ -[WARNING] Both `[..]/credentials` and `[..]/credentials.toml` exist. Using `[..]/credentials` -", - ) - .run(); - - validate_upload_foo(); -} - -#[cargo_test] -fn index_requires_token() { - // --index will not load registry.token to avoid possibly leaking - // crates.io token to another server. - registry::init(); - let credentials = paths::home().join(".cargo/credentials"); - fs::remove_file(&credentials).unwrap(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify --index") - .arg(registry_url().to_string()) - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[ERROR] command-line argument --index requires --token to be specified -", - ) - .run(); -} - -#[cargo_test] -fn registry_token_with_source_replacement() { - // publish with source replacement without --token - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify") - .with_stderr( - "\ -[UPDATING] [..] -[WARNING] using `registry.token` config value with source replacement is deprecated -This may become a hard error in the future[..] -Use the --token command-line flag to remove this warning. -[WARNING] manifest has no documentation, [..] -See [..] -[PACKAGING] foo v0.0.1 ([CWD]) -[UPLOADING] foo v0.0.1 ([CWD]) -", - ) - .run(); -} - -#[cargo_test] -fn publish_with_missing_readme() { - registry::init(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - license = "MIT" - description = "foo" - homepage = "https://example.com/" - readme = "foo.md" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify --token sekrit") - .with_status(101) - .with_stderr(&format!( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.1.0 [..] -[UPLOADING] foo v0.1.0 [..] 
-[ERROR] failed to read `readme` file for package `foo v0.1.0 ([ROOT]/foo)` - -Caused by: - failed to read `[ROOT]/foo/foo.md` - -Caused by: - {} -", - no_such_file_err_msg() - )) - .run(); -} - -#[cargo_test] -fn api_error_json() { - // Registry returns an API error. - let t = registry::RegistryBuilder::new().build_api_server(&|_headers| { - (403, &r#"{"errors": [{"detail": "you must be logged in"}]}"#) - }); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify --registry alternative") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.0.1 [..] -[UPLOADING] foo v0.0.1 [..] -[ERROR] failed to publish to registry at http://127.0.0.1:[..]/ - -Caused by: - the remote server responded with an error (status 403 Forbidden): you must be logged in -", - ) - .run(); - - t.join().unwrap(); -} - -#[cargo_test] -fn api_error_200() { - // Registry returns an API error with a 200 status code. - let t = registry::RegistryBuilder::new().build_api_server(&|_headers| { - ( - 200, - &r#"{"errors": [{"detail": "max upload size is 123"}]}"#, - ) - }); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify --registry alternative") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.0.1 [..] -[UPLOADING] foo v0.0.1 [..] -[ERROR] failed to publish to registry at http://127.0.0.1:[..]/ - -Caused by: - the remote server responded with an error: max upload size is 123 -", - ) - .run(); - - t.join().unwrap(); -} - -#[cargo_test] -fn api_error_code() { - // Registry returns an error code without a JSON message. - let t = registry::RegistryBuilder::new().build_api_server(&|_headers| (400, &"go away")); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify --registry alternative") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.0.1 [..] -[UPLOADING] foo v0.0.1 [..] -[ERROR] failed to publish to registry at http://127.0.0.1:[..]/ - -Caused by: - failed to get a 200 OK response, got 400 - headers: - HTTP/1.1 400 - Content-Length: 7 - - body: - go away -", - ) - .run(); - - t.join().unwrap(); -} - -#[cargo_test] -fn api_curl_error() { - // Registry has a network error. - let t = registry::RegistryBuilder::new().build_api_server(&|_headers| panic!("broke!")); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - - // This doesn't check for the exact text of the error in the remote - // possibility that cargo is linked with a weird version of libcurl, or - // curl changes the text of the message. 
Currently the message 52 - // (CURLE_GOT_NOTHING) is: - // Server returned nothing (no headers, no data) (Empty reply from server) - p.cargo("publish --no-verify --registry alternative") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.0.1 [..] -[UPLOADING] foo v0.0.1 [..] -[ERROR] failed to publish to registry at http://127.0.0.1:[..]/ - -Caused by: - [52] [..] -", - ) - .run(); - - let e = t.join().unwrap_err(); - assert_eq!(*e.downcast::<&str>().unwrap(), "broke!"); -} - -#[cargo_test] -fn api_other_error() { - // Registry returns an invalid response. - let t = registry::RegistryBuilder::new().build_api_server(&|_headers| (200, b"\xff")); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --no-verify --registry alternative") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.0.1 [..] -[UPLOADING] foo v0.0.1 [..] -[ERROR] failed to publish to registry at http://127.0.0.1:[..]/ - -Caused by: - invalid response from server - -Caused by: - response body was not valid utf-8 -", - ) - .run(); - - t.join().unwrap(); -} - -#[cargo_test] -fn in_workspace() { - registry::init(); - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "bar"] - "#, - ) - .file( - "foo/Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("foo/src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.0.1" - authors = [] - license = "MIT" - description = "bar" - workspace = ".." - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .build(); - - p.cargo("publish --no-verify --token sekrit -p foo") - .with_stderr( - "\ -[UPDATING] [..] -[WARNING] manifest has no documentation, [..] -See [..] -[PACKAGING] foo v0.0.1 ([CWD]/foo) -[UPLOADING] foo v0.0.1 ([CWD]/foo) -", - ) - .run(); - - validate_upload_foo(); - - p.cargo("publish --no-verify --token sekrit -p bar") - .with_stderr( - "\ -[UPDATING] [..] -[WARNING] manifest has no documentation, [..] -See [..] -[PACKAGING] bar v0.0.1 ([CWD]/bar) -[UPLOADING] bar v0.0.1 ([CWD]/bar) -", - ) - .run(); - - validate_upload_bar(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/publish_lockfile.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/publish_lockfile.rs deleted file mode 100644 index b362cbf76..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/publish_lockfile.rs +++ /dev/null @@ -1,482 +0,0 @@ -//! Tests for including `Cargo.lock` when publishing/packaging. 
- -use std::fs::File; - -use cargo_test_support::registry::Package; -use cargo_test_support::{ - basic_manifest, cargo_process, git, paths, project, publish::validate_crate_contents, -}; - -fn pl_manifest(name: &str, version: &str, extra: &str) -> String { - format!( - r#" - [package] - name = "{}" - version = "{}" - authors = [] - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - - {} - "#, - name, version, extra - ) -} - -#[cargo_test] -fn removed() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["publish-lockfile"] - [package] - name = "foo" - version = "0.1.0" - publish-lockfile = true - license = "MIT" - description = "foo" - documentation = "foo" - homepage = "foo" - repository = "foo" - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("package") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at [..] - -Caused by: - the cargo feature `publish-lockfile` has been removed in the 1.37 release - - Remove the feature from Cargo.toml to remove this error. - See https://doc.rust-lang.org/[..]cargo/reference/unstable.html#publish-lockfile [..] -", - ) - .run(); -} - -#[cargo_test] -fn package_lockfile() { - let p = project() - .file("Cargo.toml", &pl_manifest("foo", "0.0.1", "")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("package") - .with_stderr( - "\ -[PACKAGING] foo v0.0.1 ([CWD]) -[VERIFYING] foo v0.0.1 ([CWD]) -[COMPILING] foo v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - assert!(p.root().join("target/package/foo-0.0.1.crate").is_file()); - p.cargo("package -l") - .with_stdout( - "\ -Cargo.lock -Cargo.toml -Cargo.toml.orig -src/main.rs -", - ) - .run(); - p.cargo("package").with_stdout("").run(); - - let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); - validate_crate_contents( - f, - "foo-0.0.1.crate", - &["Cargo.toml", "Cargo.toml.orig", "Cargo.lock", "src/main.rs"], - &[], - ); -} - -#[cargo_test] -fn package_lockfile_git_repo() { - // Create a Git repository containing a minimal Rust project. - let g = git::repo(&paths::root().join("foo")) - .file("Cargo.toml", &pl_manifest("foo", "0.0.1", "")) - .file("src/main.rs", "fn main() {}") - .build(); - cargo_process("package -l") - .cwd(g.root()) - .with_stdout( - "\ -.cargo_vcs_info.json -Cargo.lock -Cargo.toml -Cargo.toml.orig -src/main.rs -", - ) - .run(); - cargo_process("package -v") - .cwd(g.root()) - .with_stderr( - "\ -[PACKAGING] foo v0.0.1 ([..]) -[ARCHIVING] .cargo_vcs_info.json -[ARCHIVING] Cargo.lock -[ARCHIVING] Cargo.toml -[ARCHIVING] Cargo.toml.orig -[ARCHIVING] src/main.rs -[VERIFYING] foo v0.0.1 ([..]) -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc --crate-name foo src/main.rs [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn no_lock_file_with_library() { - let p = project() - .file("Cargo.toml", &pl_manifest("foo", "0.0.1", "")) - .file("src/lib.rs", "") - .build(); - - p.cargo("package").run(); - - let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); - validate_crate_contents( - f, - "foo-0.0.1.crate", - &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], - &[], - ); -} - -#[cargo_test] -fn lock_file_and_workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo"] - "#, - ) - .file("foo/Cargo.toml", &pl_manifest("foo", "0.0.1", "")) - .file("foo/src/main.rs", "fn main() {}") - .build(); - - p.cargo("package").cwd("foo").run(); - - let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap(); - validate_crate_contents( - f, - "foo-0.0.1.crate", - &["Cargo.toml", "Cargo.toml.orig", "src/main.rs", "Cargo.lock"], - &[], - ); -} - -#[cargo_test] -fn note_resolve_changes() { - // `multi` has multiple sources (path and registry). - Package::new("mutli", "0.1.0").publish(); - // `updated` is always from registry, but should not change. - Package::new("updated", "1.0.0").publish(); - // `patched` is [patch]ed. - Package::new("patched", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - &pl_manifest( - "foo", - "0.0.1", - r#" - [dependencies] - mutli = { path = "mutli", version = "0.1" } - updated = "1.0" - patched = "1.0" - - [patch.crates-io] - patched = { path = "patched" } - "#, - ), - ) - .file("src/main.rs", "fn main() {}") - .file("mutli/Cargo.toml", &basic_manifest("mutli", "0.1.0")) - .file("mutli/src/lib.rs", "") - .file("patched/Cargo.toml", &basic_manifest("patched", "1.0.0")) - .file("patched/src/lib.rs", "") - .build(); - - p.cargo("generate-lockfile").run(); - - // Make sure this does not change or warn. - Package::new("updated", "1.0.1").publish(); - - p.cargo("package --no-verify -v --allow-dirty") - .with_stderr_unordered( - "\ -[PACKAGING] foo v0.0.1 ([..]) -[ARCHIVING] Cargo.lock -[ARCHIVING] Cargo.toml -[ARCHIVING] Cargo.toml.orig -[ARCHIVING] src/main.rs -[UPDATING] `[..]` index -[NOTE] package `mutli v0.1.0` added to the packaged Cargo.lock file, was originally sourced from `[..]/foo/mutli` -[NOTE] package `patched v1.0.0` added to the packaged Cargo.lock file, was originally sourced from `[..]/foo/patched` -", - ) - .run(); -} - -#[cargo_test] -fn outdated_lock_version_change_does_not_warn() { - // If the version of the package being packaged changes, but Cargo.lock is - // not updated, don't bother warning about it. - let p = project() - .file("Cargo.toml", &pl_manifest("foo", "0.1.0", "")) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("generate-lockfile").run(); - - p.change_file("Cargo.toml", &pl_manifest("foo", "0.2.0", "")); - - p.cargo("package --no-verify") - .with_stderr("[PACKAGING] foo v0.2.0 ([..])") - .run(); -} - -#[cargo_test] -fn no_warn_workspace_extras() { - // Other entries in workspace lock file should be ignored. 
- Package::new("dep1", "1.0.0").publish(); - Package::new("dep2", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - &pl_manifest( - "a", - "0.1.0", - r#" - [dependencies] - dep1 = "1.0" - "#, - ), - ) - .file("a/src/main.rs", "fn main() {}") - .file( - "b/Cargo.toml", - &pl_manifest( - "b", - "0.1.0", - r#" - [dependencies] - dep2 = "1.0" - "#, - ), - ) - .file("b/src/main.rs", "fn main() {}") - .build(); - p.cargo("generate-lockfile").run(); - p.cargo("package --no-verify") - .cwd("a") - .with_stderr( - "\ -[PACKAGING] a v0.1.0 ([..]) -[UPDATING] `[..]` index -", - ) - .run(); -} - -#[cargo_test] -fn warn_package_with_yanked() { - Package::new("bar", "0.1.0").publish(); - let p = project() - .file( - "Cargo.toml", - &pl_manifest( - "foo", - "0.0.1", - r#" - [dependencies] - bar = "0.1" - "#, - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - p.cargo("generate-lockfile").run(); - Package::new("bar", "0.1.0").yanked(true).publish(); - // Make sure it sticks with the locked (yanked) version. - Package::new("bar", "0.1.1").publish(); - p.cargo("package --no-verify") - .with_stderr( - "\ -[PACKAGING] foo v0.0.1 ([..]) -[UPDATING] `[..]` index -[WARNING] package `bar v0.1.0` in Cargo.lock is yanked in registry \ - `crates-io`, consider updating to a version that is not yanked -", - ) - .run(); -} - -#[cargo_test] -fn warn_install_with_yanked() { - Package::new("bar", "0.1.0").yanked(true).publish(); - Package::new("bar", "0.1.1").publish(); - Package::new("foo", "0.1.0") - .dep("bar", "0.1") - .file("src/main.rs", "fn main() {}") - .file( - "Cargo.lock", - r#" -[[package]] -name = "bar" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "foo" -version = "0.1.0" -dependencies = [ - "bar 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - "#, - ) - .publish(); - - cargo_process("install --locked foo") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] foo v0.1.0 (registry `[..]`) -[INSTALLING] foo v0.1.0 -[WARNING] package `bar v0.1.0` in Cargo.lock is yanked in registry \ - `crates-io`, consider running without --locked -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 (registry `[..]`) -[COMPILING] bar v0.1.0 -[COMPILING] foo v0.1.0 -[FINISHED] release [optimized] target(s) in [..] -[INSTALLING] [..]/.cargo/bin/foo[EXE] -[INSTALLED] package `foo v0.1.0` (executable `foo[EXE]`) -[WARNING] be sure to add [..] -", - ) - .run(); - - // Try again without --locked, make sure it uses 0.1.1 and does not warn. - cargo_process("install --force foo") - .with_stderr( - "\ -[UPDATING] `[..]` index -[INSTALLING] foo v0.1.0 -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.1 (registry `[..]`) -[COMPILING] bar v0.1.1 -[COMPILING] foo v0.1.0 -[FINISHED] release [optimized] target(s) in [..] -[REPLACING] [..]/.cargo/bin/foo[EXE] -[REPLACED] package `foo v0.1.0` with `foo v0.1.0` (executable `foo[EXE]`) -[WARNING] be sure to add [..] -", - ) - .run(); -} - -#[cargo_test] -fn ignore_lockfile() { - // With an explicit `include` list, but Cargo.lock in .gitignore, don't - // complain about `Cargo.lock` being ignored. Note that it is still - // included in the packaged regardless. 
- let p = git::new("foo", |p| { - p.file( - "Cargo.toml", - &pl_manifest( - "foo", - "0.0.1", - r#" - include = [ - "src/main.rs" - ] - "#, - ), - ) - .file("src/main.rs", "fn main() {}") - .file(".gitignore", "Cargo.lock") - }); - p.cargo("package -l") - .with_stdout( - "\ -.cargo_vcs_info.json -Cargo.lock -Cargo.toml -Cargo.toml.orig -src/main.rs -", - ) - .run(); - p.cargo("generate-lockfile").run(); - p.cargo("package -v") - .with_stderr( - "\ -[PACKAGING] foo v0.0.1 ([..]) -[ARCHIVING] .cargo_vcs_info.json -[ARCHIVING] Cargo.lock -[ARCHIVING] Cargo.toml -[ARCHIVING] Cargo.toml.orig -[ARCHIVING] src/main.rs -[VERIFYING] foo v0.0.1 ([..]) -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc --crate-name foo src/main.rs [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn ignore_lockfile_inner() { - // Ignore `Cargo.lock` if in .gitignore in a git subdirectory. - let p = git::new("foo", |p| { - p.no_manifest() - .file("bar/Cargo.toml", &pl_manifest("bar", "0.0.1", "")) - .file("bar/src/main.rs", "fn main() {}") - .file("bar/.gitignore", "Cargo.lock") - }); - p.cargo("generate-lockfile").cwd("bar").run(); - p.cargo("package -v --no-verify") - .cwd("bar") - .with_stderr( - "\ -[PACKAGING] bar v0.0.1 ([..]) -[ARCHIVING] .cargo_vcs_info.json -[ARCHIVING] .gitignore -[ARCHIVING] Cargo.lock -[ARCHIVING] Cargo.toml -[ARCHIVING] Cargo.toml.orig -[ARCHIVING] src/main.rs -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/read_manifest.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/read_manifest.rs deleted file mode 100644 index b5e9f05a3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/read_manifest.rs +++ /dev/null @@ -1,206 +0,0 @@ -//! Tests for the `cargo read-manifest` command. 
- -use cargo_test_support::{basic_bin_manifest, main_file, project}; - -fn manifest_output(readme_value: &str) -> String { - format!( - r#" -{{ - "authors": [ - "wycats@example.com" - ], - "categories": [], - "default_run": null, - "name":"foo", - "readme": {}, - "homepage": null, - "documentation": null, - "repository": null, - "rust_version": null, - "version":"0.5.0", - "id":"foo[..]0.5.0[..](path+file://[..]/foo)", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "description": null, - "edition": "2015", - "source":null, - "dependencies":[], - "targets":[{{ - "kind":["bin"], - "crate_types":["bin"], - "doc": true, - "doctest": false, - "test": true, - "edition": "2015", - "name":"foo", - "src_path":"[..]/foo/src/foo.rs" - }}], - "features":{{}}, - "manifest_path":"[..]Cargo.toml", - "metadata": null, - "publish": null -}}"#, - readme_value - ) -} - -fn manifest_output_no_readme() -> String { - manifest_output("null") -} - -pub fn basic_bin_manifest_with_readme(name: &str, readme_filename: &str) -> String { - format!( - r#" - [package] - - name = "{}" - version = "0.5.0" - authors = ["wycats@example.com"] - readme = {} - - [[bin]] - - name = "{}" - "#, - name, readme_filename, name - ) -} - -#[cargo_test] -fn cargo_read_manifest_path_to_cargo_toml_relative() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("read-manifest --manifest-path foo/Cargo.toml") - .cwd(p.root().parent().unwrap()) - .with_json(&manifest_output_no_readme()) - .run(); -} - -#[cargo_test] -fn cargo_read_manifest_path_to_cargo_toml_absolute() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("read-manifest --manifest-path") - .arg(p.root().join("Cargo.toml")) - .cwd(p.root().parent().unwrap()) - .with_json(&manifest_output_no_readme()) - .run(); -} - -#[cargo_test] -fn cargo_read_manifest_path_to_cargo_toml_parent_relative() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("read-manifest --manifest-path foo") - .cwd(p.root().parent().unwrap()) - .with_status(101) - .with_stderr( - "[ERROR] the manifest-path must be \ - a path to a Cargo.toml file", - ) - .run(); -} - -#[cargo_test] -fn cargo_read_manifest_path_to_cargo_toml_parent_absolute() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("read-manifest --manifest-path") - .arg(p.root()) - .cwd(p.root().parent().unwrap()) - .with_status(101) - .with_stderr( - "[ERROR] the manifest-path must be \ - a path to a Cargo.toml file", - ) - .run(); -} - -#[cargo_test] -fn cargo_read_manifest_cwd() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("read-manifest") - .with_json(&manifest_output_no_readme()) - .run(); -} - -#[cargo_test] -fn cargo_read_manifest_with_specified_readme() { - let p = project() - .file( - "Cargo.toml", - &basic_bin_manifest_with_readme("foo", r#""SomeReadme.txt""#), - ) - .file("SomeReadme.txt", "Sample Project") - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("read-manifest") - .with_json(&manifest_output(&format!(r#""{}""#, "SomeReadme.txt"))) - .run(); -} - -#[cargo_test] -fn 
cargo_read_manifest_default_readme() { - let readme_filenames = ["README.md", "README.txt", "README"]; - - for readme in readme_filenames.iter() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file(readme, "Sample project") - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("read-manifest") - .with_json(&manifest_output(&format!(r#""{}""#, readme))) - .run(); - } -} - -#[cargo_test] -fn cargo_read_manifest_suppress_default_readme() { - let p = project() - .file( - "Cargo.toml", - &basic_bin_manifest_with_readme("foo", "false"), - ) - .file("README.txt", "Sample project") - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("read-manifest") - .with_json(&manifest_output_no_readme()) - .run(); -} - -// If a file named README.md exists, and `readme = true`, the value `README.md` should be defaulted in. -#[cargo_test] -fn cargo_read_manifest_defaults_readme_if_true() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest_with_readme("foo", "true")) - .file("README.md", "Sample project") - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("read-manifest") - .with_json(&manifest_output(r#""README.md""#)) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/registry.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/registry.rs deleted file mode 100644 index c62037012..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/registry.rs +++ /dev/null @@ -1,2090 +0,0 @@ -//! Tests for normal registry dependencies. - -use cargo::core::SourceId; -use cargo_test_support::paths::{self, CargoPathExt}; -use cargo_test_support::registry::{self, registry_path, Dependency, Package}; -use cargo_test_support::{basic_manifest, project}; -use cargo_test_support::{cargo_process, registry::registry_url}; -use cargo_test_support::{git, install::cargo_home, t}; -use cargo_util::paths::remove_dir_all; -use std::fs::{self, File}; -use std::path::Path; - -#[cargo_test] -fn simple() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = ">= 0.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`) -[COMPILING] bar v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); - - p.cargo("clean").run(); - - // Don't download a second time - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = ">= 0.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("baz", "0.0.1").publish(); - Package::new("bar", "0.0.1").dep("baz", "*").publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`) -[DOWNLOADED] [..] 
v0.0.1 (registry `dummy-registry`) -[COMPILING] baz v0.0.1 -[COMPILING] bar v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn nonexistent() { - Package::new("init", "0.0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - nonexistent = ">= 0.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -error: no matching package named `nonexistent` found -location searched: registry [..] -required by package `foo v0.0.1 ([..])` -", - ) - .run(); -} - -#[cargo_test] -fn wrong_case() { - Package::new("init", "0.0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - Init = ">= 0.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - // #5678 to make this work - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -error: no matching package found -searched package name: `Init` -perhaps you meant: init -location searched: registry [..] -required by package `foo v0.0.1 ([..])` -", - ) - .run(); -} - -#[cargo_test] -fn mis_hyphenated() { - Package::new("mis-hyphenated", "0.0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - mis_hyphenated = ">= 0.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - // #2775 to make this work - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -error: no matching package found -searched package name: `mis_hyphenated` -perhaps you meant: mis-hyphenated -location searched: registry [..] -required by package `foo v0.0.1 ([..])` -", - ) - .run(); -} - -#[cargo_test] -fn wrong_version() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - foo = ">= 1.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("foo", "0.0.1").publish(); - Package::new("foo", "0.0.2").publish(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - "\ -error: failed to select a version for the requirement `foo = \">=1.0.0\"` -candidate versions found which didn't match: 0.0.2, 0.0.1 -location searched: `[..]` index (which is replacing registry `[..]`) -required by package `foo v0.0.1 ([..])` -", - ) - .run(); - - Package::new("foo", "0.0.3").publish(); - Package::new("foo", "0.0.4").publish(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - "\ -error: failed to select a version for the requirement `foo = \">=1.0.0\"` -candidate versions found which didn't match: 0.0.4, 0.0.3, 0.0.2, ... -location searched: `[..]` index (which is replacing registry `[..]`) -required by package `foo v0.0.1 ([..])` -", - ) - .run(); -} - -#[cargo_test] -fn bad_cksum() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bad-cksum = ">= 0.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - let pkg = Package::new("bad-cksum", "0.0.1"); - pkg.publish(); - t!(File::create(&pkg.archive_dst())); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -[DOWNLOADING] crates ... 
-[DOWNLOADED] bad-cksum [..] -[ERROR] failed to download replaced source registry `crates-io` - -Caused by: - failed to verify the checksum of `bad-cksum v0.0.1 (registry `dummy-registry`)` -", - ) - .run(); -} - -#[cargo_test] -fn update_registry() { - Package::new("init", "0.0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - notyet = ">= 0.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - "\ -error: no matching package named `notyet` found -location searched: registry `[..]` -required by package `foo v0.0.1 ([..])` -", - ) - .run(); - - Package::new("notyet", "0.0.1").publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[DOWNLOADING] crates ... -[DOWNLOADED] notyet v0.0.1 (registry `dummy-registry`) -[COMPILING] notyet v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn package_with_path_deps() { - Package::new("init", "0.0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - repository = "bar" - - [dependencies.notyet] - version = "0.0.1" - path = "notyet" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("notyet/Cargo.toml", &basic_manifest("notyet", "0.0.1")) - .file("notyet/src/lib.rs", "") - .build(); - - p.cargo("package") - .with_status(101) - .with_stderr_contains( - "\ -[PACKAGING] foo [..] -[UPDATING] [..] -[ERROR] failed to prepare local package for uploading - -Caused by: - no matching package named `notyet` found - location searched: registry `crates-io` - required by package `foo v0.0.1 [..]` -", - ) - .run(); - - Package::new("notyet", "0.0.1").publish(); - - p.cargo("package") - .with_stderr( - "\ -[PACKAGING] foo v0.0.1 ([CWD]) -[UPDATING] `[..]` index -[VERIFYING] foo v0.0.1 ([CWD]) -[DOWNLOADING] crates ... -[DOWNLOADED] notyet v0.0.1 (registry `dummy-registry`) -[COMPILING] notyet v0.0.1 -[COMPILING] foo v0.0.1 ([CWD][..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn lockfile_locks() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`) -[COMPILING] bar v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); - - p.root().move_into_the_past(); - Package::new("bar", "0.0.2").publish(); - - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn lockfile_locks_transitively() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("baz", "0.0.1").publish(); - Package::new("bar", "0.0.1").dep("baz", "*").publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`) -[DOWNLOADED] [..] 
v0.0.1 (registry `dummy-registry`) -[COMPILING] baz v0.0.1 -[COMPILING] bar v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); - - p.root().move_into_the_past(); - Package::new("baz", "0.0.2").publish(); - Package::new("bar", "0.0.2").dep("baz", "*").publish(); - - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn yanks_are_not_used() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("baz", "0.0.1").publish(); - Package::new("baz", "0.0.2").yanked(true).publish(); - Package::new("bar", "0.0.1").dep("baz", "*").publish(); - Package::new("bar", "0.0.2") - .dep("baz", "*") - .yanked(true) - .publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`) -[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`) -[COMPILING] baz v0.0.1 -[COMPILING] bar v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn relying_on_a_yank_is_bad() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("baz", "0.0.1").publish(); - Package::new("baz", "0.0.2").yanked(true).publish(); - Package::new("bar", "0.0.1").dep("baz", "=0.0.2").publish(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - "\ -error: failed to select a version for the requirement `baz = \"=0.0.2\"` -candidate versions found which didn't match: 0.0.1 -location searched: `[..]` index (which is replacing registry `[..]`) -required by package `bar v0.0.1` - ... which satisfies dependency `bar = \"*\"` of package `foo [..]` -", - ) - .run(); -} - -#[cargo_test] -fn yanks_in_lockfiles_are_ok() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").publish(); - - p.cargo("build").run(); - - registry_path().join("3").rm_rf(); - - Package::new("bar", "0.0.1").yanked(true).publish(); - - p.cargo("build").with_stdout("").run(); - - p.cargo("update") - .with_status(101) - .with_stderr_contains( - "\ -error: no matching package named `bar` found -location searched: registry [..] -required by package `foo v0.0.1 ([..])` -", - ) - .run(); -} - -#[cargo_test] -fn yanks_in_lockfiles_are_ok_for_other_update() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - baz = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").publish(); - Package::new("baz", "0.0.1").publish(); - - p.cargo("build").run(); - - registry_path().join("3").rm_rf(); - - Package::new("bar", "0.0.1").yanked(true).publish(); - Package::new("baz", "0.0.1").publish(); - - p.cargo("build").with_stdout("").run(); - - Package::new("baz", "0.0.2").publish(); - - p.cargo("update") - .with_status(101) - .with_stderr_contains( - "\ -error: no matching package named `bar` found -location searched: registry [..] 
-required by package `foo v0.0.1 ([..])` -", - ) - .run(); - - p.cargo("update -p baz") - .with_stderr_contains( - "\ -[UPDATING] `[..]` index -[UPDATING] baz v0.0.1 -> v0.0.2 -", - ) - .run(); -} - -#[cargo_test] -fn yanks_in_lockfiles_are_ok_with_new_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").publish(); - - p.cargo("build").run(); - - registry_path().join("3").rm_rf(); - - Package::new("bar", "0.0.1").yanked(true).publish(); - Package::new("baz", "0.0.1").publish(); - - p.change_file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - baz = "*" - "#, - ); - - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn update_with_lockfile_if_packages_missing() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1").publish(); - p.cargo("build").run(); - p.root().move_into_the_past(); - - paths::home().join(".cargo/registry").rm_rf(); - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn update_lockfile() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - println!("0.0.1"); - Package::new("bar", "0.0.1").publish(); - p.cargo("build").run(); - - Package::new("bar", "0.0.2").publish(); - Package::new("bar", "0.0.3").publish(); - paths::home().join(".cargo/registry").rm_rf(); - println!("0.0.2 update"); - p.cargo("update -p bar --precise 0.0.2") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] bar v0.0.1 -> v0.0.2 -", - ) - .run(); - - println!("0.0.2 build"); - p.cargo("build") - .with_stderr( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] [..] v0.0.2 (registry `dummy-registry`) -[COMPILING] bar v0.0.2 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); - - println!("0.0.3 update"); - p.cargo("update -p bar") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] bar v0.0.2 -> v0.0.3 -", - ) - .run(); - - println!("0.0.3 build"); - p.cargo("build") - .with_stderr( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] [..] 
v0.0.3 (registry `dummy-registry`) -[COMPILING] bar v0.0.3 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); - - println!("new dependencies update"); - Package::new("bar", "0.0.4").dep("spam", "0.2.5").publish(); - Package::new("spam", "0.2.5").publish(); - p.cargo("update -p bar") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] bar v0.0.3 -> v0.0.4 -[ADDING] spam v0.2.5 -", - ) - .run(); - - println!("new dependencies update"); - Package::new("bar", "0.0.5").publish(); - p.cargo("update -p bar") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] bar v0.0.4 -> v0.0.5 -[REMOVING] spam v0.2.5 -", - ) - .run(); -} - -#[cargo_test] -fn dev_dependency_not_used() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("baz", "0.0.1").publish(); - Package::new("bar", "0.0.1").dev_dep("baz", "*").publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] [..] v0.0.1 (registry `dummy-registry`) -[COMPILING] bar v0.0.1 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn login_with_no_cargo_dir() { - // Create a config in the root directory because `login` requires the - // index to be updated, and we don't want to hit crates.io. - registry::init(); - fs::rename(paths::home().join(".cargo"), paths::root().join(".cargo")).unwrap(); - paths::home().rm_rf(); - cargo_process("login foo -v").run(); - let credentials = fs::read_to_string(paths::home().join(".cargo/credentials")).unwrap(); - assert_eq!(credentials, "[registry]\ntoken = \"foo\"\n"); -} - -#[cargo_test] -fn login_with_differently_sized_token() { - // Verify that the configuration file gets properly truncated. - registry::init(); - let credentials = paths::home().join(".cargo/credentials"); - fs::remove_file(&credentials).unwrap(); - cargo_process("login lmaolmaolmao -v").run(); - cargo_process("login lmao -v").run(); - cargo_process("login lmaolmaolmao -v").run(); - let credentials = fs::read_to_string(&credentials).unwrap(); - assert_eq!(credentials, "[registry]\ntoken = \"lmaolmaolmao\"\n"); -} - -#[cargo_test] -fn bad_license_file() { - Package::new("foo", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license-file = "foo" - description = "bar" - repository = "baz" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - p.cargo("publish -v --token sekrit") - .with_status(101) - .with_stderr_contains("[ERROR] the license file `foo` does not exist") - .run(); -} - -#[cargo_test] -fn updating_a_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.a] - path = "a" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("a/src/lib.rs", "") - .build(); - - Package::new("bar", "0.0.1").publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... 
-[DOWNLOADED] bar v0.0.1 (registry `dummy-registry`) -[COMPILING] bar v0.0.1 -[COMPILING] a v0.0.1 ([CWD]/a) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); - - p.change_file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - "#, - ); - Package::new("bar", "0.1.0").publish(); - - println!("second"); - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.1.0 (registry `dummy-registry`) -[COMPILING] bar v0.1.0 -[COMPILING] a v0.0.1 ([CWD]/a) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn git_and_registry_dep() { - let b = git::repo(&paths::root().join("b")) - .file( - "Cargo.toml", - r#" - [project] - name = "b" - version = "0.0.1" - authors = [] - - [dependencies] - a = "0.0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = "0.0.1" - - [dependencies.b] - git = '{}' - "#, - b.url() - ), - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("a", "0.0.1").publish(); - - p.root().move_into_the_past(); - p.cargo("build") - .with_stderr( - "\ -[UPDATING] [..] -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] a v0.0.1 (registry `dummy-registry`) -[COMPILING] a v0.0.1 -[COMPILING] b v0.0.1 ([..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); - p.root().move_into_the_past(); - - println!("second"); - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn update_publish_then_update() { - // First generate a Cargo.lock and a clone of the registry index at the - // "head" of the current registry. - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = "0.1.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - Package::new("a", "0.1.0").publish(); - p.cargo("build").run(); - - // Next, publish a new package and back up the copy of the registry we just - // created. - Package::new("a", "0.1.1").publish(); - let registry = paths::home().join(".cargo/registry"); - let backup = paths::root().join("registry-backup"); - t!(fs::rename(®istry, &backup)); - - // Generate a Cargo.lock with the newer version, and then move the old copy - // of the registry back into place. - let p2 = project() - .at("foo2") - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = "0.1.1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - p2.cargo("build").run(); - registry.rm_rf(); - t!(fs::rename(&backup, ®istry)); - t!(fs::rename( - p2.root().join("Cargo.lock"), - p.root().join("Cargo.lock") - )); - - // Finally, build the first project again (with our newer Cargo.lock) which - // should force an update of the old registry, download the new crate, and - // then build everything again. - p.cargo("build") - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... 
-[DOWNLOADED] a v0.1.1 (registry `dummy-registry`) -[COMPILING] a v0.1.1 -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn fetch_downloads() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = "0.1.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("a", "0.1.0").publish(); - - p.cargo("fetch") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] a v0.1.0 (registry [..]) -", - ) - .run(); -} - -#[cargo_test] -fn update_transitive_dependency() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = "0.1.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("a", "0.1.0").dep("b", "*").publish(); - Package::new("b", "0.1.0").publish(); - - p.cargo("fetch").run(); - - Package::new("b", "0.1.1").publish(); - - p.cargo("update -pb") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] b v0.1.0 -> v0.1.1 -", - ) - .run(); - - p.cargo("build") - .with_stderr( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] b v0.1.1 (registry `dummy-registry`) -[COMPILING] b v0.1.1 -[COMPILING] a v0.1.0 -[COMPILING] foo v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn update_backtracking_ok() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - webdriver = "0.1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("webdriver", "0.1.0") - .dep("hyper", "0.6") - .publish(); - Package::new("hyper", "0.6.5") - .dep("openssl", "0.1") - .dep("cookie", "0.1") - .publish(); - Package::new("cookie", "0.1.0") - .dep("openssl", "0.1") - .publish(); - Package::new("openssl", "0.1.0").publish(); - - p.cargo("generate-lockfile").run(); - - Package::new("openssl", "0.1.1").publish(); - Package::new("hyper", "0.6.6") - .dep("openssl", "0.1.1") - .dep("cookie", "0.1.0") - .publish(); - - p.cargo("update -p hyper") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] hyper v0.6.5 -> v0.6.6 -[UPDATING] openssl v0.1.0 -> v0.1.1 -", - ) - .run(); -} - -#[cargo_test] -fn update_multiple_packages() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - a = "*" - b = "*" - c = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("a", "0.1.0").publish(); - Package::new("b", "0.1.0").publish(); - Package::new("c", "0.1.0").publish(); - - p.cargo("fetch").run(); - - Package::new("a", "0.1.1").publish(); - Package::new("b", "0.1.1").publish(); - Package::new("c", "0.1.1").publish(); - - p.cargo("update -pa -pb") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] a v0.1.0 -> v0.1.1 -[UPDATING] b v0.1.0 -> v0.1.1 -", - ) - .run(); - - p.cargo("update -pb -pc") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] c v0.1.0 -> v0.1.1 -", - ) - .run(); - - p.cargo("build") - .with_stderr_contains("[DOWNLOADED] a v0.1.1 (registry `dummy-registry`)") - .with_stderr_contains("[DOWNLOADED] b v0.1.1 (registry `dummy-registry`)") - .with_stderr_contains("[DOWNLOADED] c v0.1.1 (registry `dummy-registry`)") - .with_stderr_contains("[COMPILING] a v0.1.1") - .with_stderr_contains("[COMPILING] b v0.1.1") - 
.with_stderr_contains("[COMPILING] c v0.1.1") - .with_stderr_contains("[COMPILING] foo v0.5.0 ([..])") - .run(); -} - -#[cargo_test] -fn bundled_crate_in_registry() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = [] - - [dependencies] - bar = "0.1" - baz = "0.1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.1.0").publish(); - Package::new("baz", "0.1.0") - .dep("bar", "0.1.0") - .file( - "Cargo.toml", - r#" - [package] - name = "baz" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "bar", version = "0.1.0" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "") - .publish(); - - p.cargo("run").run(); -} - -#[cargo_test] -fn update_same_prefix_oh_my_how_was_this_a_bug() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "ugh" - version = "0.5.0" - authors = [] - - [dependencies] - foo = "0.1" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("foobar", "0.2.0").publish(); - Package::new("foo", "0.1.0") - .dep("foobar", "0.2.0") - .publish(); - - p.cargo("generate-lockfile").run(); - p.cargo("update -pfoobar --precise=0.2.0").run(); -} - -#[cargo_test] -fn use_semver() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [dependencies] - foo = "1.2.3-alpha.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("foo", "1.2.3-alpha.0").publish(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn use_semver_package_incorrectly() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.1.1-alpha.0" - authors = [] - "#, - ) - .file( - "b/Cargo.toml", - r#" - [project] - name = "b" - version = "0.1.0" - authors = [] - - [dependencies] - a = { version = "^0.1", path = "../a" } - "#, - ) - .file("a/src/main.rs", "fn main() {}") - .file("b/src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: no matching package found -searched package name: `a` -prerelease package needs to be specified explicitly -a = { version = \"0.1.1-alpha.0\" } -location searched: [..] -required by package `b v0.1.0 ([..])` -", - ) - .run(); -} - -#[cargo_test] -fn only_download_relevant() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [target.foo.dependencies] - foo = "*" - [dev-dependencies] - bar = "*" - [dependencies] - baz = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("foo", "0.1.0").publish(); - Package::new("bar", "0.1.0").publish(); - Package::new("baz", "0.1.0").publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... 
-[DOWNLOADED] baz v0.1.0 ([..]) -[COMPILING] baz v0.1.0 -[COMPILING] bar v0.5.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]s -", - ) - .run(); -} - -#[cargo_test] -fn resolve_and_backtracking() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [dependencies] - foo = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("foo", "0.1.1") - .feature_dep("bar", "0.1", &["a", "b"]) - .publish(); - Package::new("foo", "0.1.0").publish(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn upstream_warnings_on_extra_verbose() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [dependencies] - foo = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("foo", "0.1.0") - .file("src/lib.rs", "fn unused() {}") - .publish(); - - p.cargo("build -vv") - .with_stderr_contains("[..]warning: function is never used[..]") - .run(); -} - -#[cargo_test] -fn disallow_network() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [dependencies] - foo = "*" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --frozen") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to get `foo` as a dependency of package `bar v0.5.0 [..]` - -Caused by: - failed to load source for dependency `foo` - -Caused by: - Unable to update registry [..] - -Caused by: - attempting to make an HTTP request, but --frozen was specified -", - ) - .run(); -} - -#[cargo_test] -fn add_dep_dont_update_registry() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [dependencies] - baz = { path = "baz" } - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "baz/Cargo.toml", - r#" - [project] - name = "baz" - version = "0.5.0" - authors = [] - - [dependencies] - remote = "0.3" - "#, - ) - .file("baz/src/lib.rs", "") - .build(); - - Package::new("remote", "0.3.4").publish(); - - p.cargo("build").run(); - - p.change_file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [dependencies] - baz = { path = "baz" } - remote = "0.3" - "#, - ); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.5.0 ([..]) -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn bump_version_dont_update_registry() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [dependencies] - baz = { path = "baz" } - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "baz/Cargo.toml", - r#" - [project] - name = "baz" - version = "0.5.0" - authors = [] - - [dependencies] - remote = "0.3" - "#, - ) - .file("baz/src/lib.rs", "") - .build(); - - Package::new("remote", "0.3.4").publish(); - - p.cargo("build").run(); - - p.change_file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.6.0" - authors = [] - - [dependencies] - baz = { path = "baz" } - "#, - ); - - p.cargo("build") - .with_stderr( - "\ -[COMPILING] bar v0.6.0 ([..]) -[FINISHED] [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn toml_lies_but_index_is_truth() { - Package::new("foo", "0.2.0").publish(); - Package::new("bar", "0.3.0") - .dep("foo", "0.2.0") - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.3.0" - authors = [] - - [dependencies] - foo = "0.1.0" - "#, - ) - .file("src/lib.rs", "extern crate foo;") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = [] - - [dependencies] - bar = "0.3" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn vv_prints_warnings() { - Package::new("foo", "0.2.0") - .file( - "src/lib.rs", - "#![deny(warnings)] fn foo() {} // unused function", - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "fo" - version = "0.5.0" - authors = [] - - [dependencies] - foo = "0.2" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -vv").run(); -} - -#[cargo_test] -fn bad_and_or_malicious_packages_rejected() { - Package::new("foo", "0.2.0") - .extra_file("foo-0.1.0/src/lib.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "fo" - version = "0.5.0" - authors = [] - - [dependencies] - foo = "0.2" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -vv") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] [..] -error: failed to download [..] - -Caused by: - failed to unpack [..] - -Caused by: - [..] contains a file at \"foo-0.1.0/src/lib.rs\" which isn't under \"foo-0.2.0\" -", - ) - .run(); -} - -#[cargo_test] -fn git_init_templatedir_missing() { - Package::new("foo", "0.2.0").dep("bar", "*").publish(); - Package::new("bar", "0.2.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "fo" - version = "0.5.0" - authors = [] - - [dependencies] - foo = "0.2" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); - - remove_dir_all(paths::home().join(".cargo/registry")).unwrap(); - fs::write( - paths::home().join(".gitconfig"), - r#" - [init] - templatedir = nowhere - "#, - ) - .unwrap(); - - p.cargo("build").run(); - p.cargo("build").run(); -} - -#[cargo_test] -fn rename_deps_and_features() { - Package::new("foo", "0.1.0") - .file("src/lib.rs", "pub fn f1() {}") - .publish(); - Package::new("foo", "0.2.0") - .file("src/lib.rs", "pub fn f2() {}") - .publish(); - Package::new("bar", "0.2.0") - .add_dep( - Dependency::new("foo01", "0.1.0") - .package("foo") - .optional(true), - ) - .add_dep(Dependency::new("foo02", "0.2.0").package("foo")) - .feature("another", &["foo01"]) - .file( - "src/lib.rs", - r#" - extern crate foo02; - #[cfg(feature = "foo01")] - extern crate foo01; - - pub fn foo() { - foo02::f2(); - #[cfg(feature = "foo01")] - foo01::f1(); - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - - [dependencies] - bar = "0.2" - "#, - ) - .file( - "src/main.rs", - " - extern crate bar; - fn main() { bar::foo(); } - ", - ) - .build(); - - p.cargo("build").run(); - p.cargo("build --features bar/foo01").run(); - p.cargo("build --features bar/another").run(); -} - -#[cargo_test] -fn ignore_invalid_json_lines() { - Package::new("foo", "0.1.0").publish(); - Package::new("foo", "0.1.1").invalid_json(true).publish(); - Package::new("foo", "0.2.0").publish(); - - let 
p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - - [dependencies] - foo = '0.1.0' - foo02 = { version = '0.2.0', package = 'foo' } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn readonly_registry_still_works() { - Package::new("foo", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "a" - version = "0.5.0" - authors = [] - - [dependencies] - foo = '0.1.0' - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("generate-lockfile").run(); - p.cargo("fetch --locked").run(); - chmod_readonly(&paths::home(), true); - p.cargo("build").run(); - // make sure we un-readonly the files afterwards so "cargo clean" can remove them (#6934) - chmod_readonly(&paths::home(), false); - - fn chmod_readonly(path: &Path, readonly: bool) { - for entry in t!(path.read_dir()) { - let entry = t!(entry); - let path = entry.path(); - if t!(entry.file_type()).is_dir() { - chmod_readonly(&path, readonly); - } else { - set_readonly(&path, readonly); - } - } - set_readonly(path, readonly); - } - - fn set_readonly(path: &Path, readonly: bool) { - let mut perms = t!(path.metadata()).permissions(); - perms.set_readonly(readonly); - t!(fs::set_permissions(path, perms)); - } -} - -#[cargo_test] -fn registry_index_rejected() { - Package::new("dep", "0.1.0").publish(); - - let p = project() - .file( - ".cargo/config", - r#" - [registry] - index = "https://example.com/" - "#, - ) - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = "0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` - -Caused by: - the `registry.index` config value is no longer supported - Use `[source]` replacement to alter the default index for crates.io. -", - ) - .run(); - - p.cargo("login") - .with_status(101) - .with_stderr( - "\ -[ERROR] the `registry.index` config value is no longer supported -Use `[source]` replacement to alter the default index for crates.io. -", - ) - .run(); -} - -#[cargo_test] -fn package_lock_inside_package_is_overwritten() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = ">= 0.0.0" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - Package::new("bar", "0.0.1") - .file("src/lib.rs", "") - .file(".cargo-ok", "") - .publish(); - - p.cargo("build").run(); - - let id = SourceId::for_registry(®istry_url()).unwrap(); - let hash = cargo::util::hex::short_hash(&id); - let ok = cargo_home() - .join("registry") - .join("src") - .join(format!("-{}", hash)) - .join("bar-0.0.1") - .join(".cargo-ok"); - - assert_eq!(ok.metadata().unwrap().len(), 2); -} - -#[cargo_test] -fn ignores_unknown_index_version() { - // If the version field is not understood, it is ignored. 
- Package::new("bar", "1.0.0").publish(); - Package::new("bar", "1.0.1").schema_version(9999).publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree") - .with_stdout( - "foo v0.1.0 [..]\n\ - โ””โ”€โ”€ bar v1.0.0\n\ - ", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rename_deps.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rename_deps.rs deleted file mode 100644 index cc0b71736..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rename_deps.rs +++ /dev/null @@ -1,391 +0,0 @@ -//! Tests for renaming dependencies. - -use cargo_test_support::git; -use cargo_test_support::paths; -use cargo_test_support::registry::{self, Package}; -use cargo_test_support::{basic_manifest, project}; - -#[cargo_test] -fn rename_dependency() { - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.2.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { version = "0.1.0" } - baz = { version = "0.2.0", package = "bar" } - "#, - ) - .file("src/lib.rs", "extern crate bar; extern crate baz;") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn rename_with_different_names() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - baz = { path = "bar", package = "bar" } - "#, - ) - .file("src/lib.rs", "extern crate baz;") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "random_name" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn lots_of_names() { - registry::alt_init(); - Package::new("foo", "0.1.0") - .file("src/lib.rs", "pub fn foo1() {}") - .publish(); - Package::new("foo", "0.2.0") - .file("src/lib.rs", "pub fn foo() {}") - .publish(); - Package::new("foo", "0.1.0") - .file("src/lib.rs", "pub fn foo2() {}") - .alternative(true) - .publish(); - - let g = git::repo(&paths::root().join("another")) - .file("Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("src/lib.rs", "pub fn foo3() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "test" - version = "0.1.0" - authors = [] - - [dependencies] - foo = "0.2" - foo1 = {{ version = "0.1", package = "foo" }} - foo2 = {{ version = "0.1", registry = "alternative", package = "foo" }} - foo3 = {{ git = '{}', package = "foo" }} - foo4 = {{ path = "foo", package = "foo" }} - "#, - g.url() - ), - ) - .file( - "src/lib.rs", - " - extern crate foo; - extern crate foo1; - extern crate foo2; - extern crate foo3; - extern crate foo4; - - pub fn foo() { - foo::foo(); - foo1::foo1(); - foo2::foo2(); - foo3::foo3(); - foo4::foo4(); - } - ", - ) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", "pub fn foo4() {}") - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn rename_and_patch() { - Package::new("foo", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { version = "0.1", package = "foo" } - - [patch.crates-io] - foo = { path = "foo" } - "#, - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { 
bar::foo(); }", - ) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", "pub fn foo() {}") - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn rename_twice() { - Package::new("foo", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { version = "0.1", package = "foo" } - [build-dependencies] - foo = { version = "0.1" } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] foo v0.1.0 (registry [..]) -error: the crate `test v0.1.0 ([CWD])` depends on crate `foo v0.1.0` multiple times with different names -", - ) - .run(); -} - -#[cargo_test] -fn rename_affects_fingerprint() { - Package::new("foo", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.1.0" - authors = [] - - [dependencies] - foo = { version = "0.1", package = "foo" } - "#, - ) - .file("src/lib.rs", "extern crate foo;") - .build(); - - p.cargo("build -v").run(); - - p.change_file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { version = "0.1", package = "foo" } - "#, - ); - - p.cargo("build -v") - .with_status(101) - .with_stderr_contains("[..]can't find crate for `foo`") - .run(); -} - -#[cargo_test] -fn can_run_doc_tests() { - Package::new("bar", "0.1.0").publish(); - Package::new("bar", "0.2.0").publish(); - - let foo = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = { version = "0.1.0" } - baz = { version = "0.2.0", package = "bar" } - "#, - ) - .file( - "src/lib.rs", - " - extern crate bar; - extern crate baz; - ", - ) - .build(); - - foo.cargo("test -v") - .with_stderr_contains( - "\ -[DOCTEST] foo -[RUNNING] `rustdoc [..]--test [..]src/lib.rs \ - [..] 
\ - --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib \ - --extern baz=[CWD]/target/debug/deps/libbar-[..].rlib \ - [..]` -", - ) - .run(); -} - -#[cargo_test] -fn features_still_work() { - Package::new("foo", "0.1.0").publish(); - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.1.0" - authors = [] - - [dependencies] - p1 = { path = 'a', features = ['b'] } - p2 = { path = 'b' } - "#, - ) - .file("src/lib.rs", "") - .file( - "a/Cargo.toml", - r#" - [package] - name = "p1" - version = "0.1.0" - authors = [] - - [dependencies] - b = { version = "0.1", package = "foo", optional = true } - "#, - ) - .file("a/src/lib.rs", "extern crate b;") - .file( - "b/Cargo.toml", - r#" - [package] - name = "p2" - version = "0.1.0" - authors = [] - - [dependencies] - b = { version = "0.1", package = "bar", optional = true } - - [features] - default = ['b'] - "#, - ) - .file("b/src/lib.rs", "extern crate b;") - .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn features_not_working() { - Package::new("foo", "0.1.0").publish(); - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.1.0" - authors = [] - - [dependencies] - a = { path = 'a', package = 'p1', optional = true } - - [features] - default = ['p1'] - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("p1", "0.1.0")) - .build(); - - p.cargo("build -v") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - feature `default` includes `p1` which is neither a dependency nor another feature -", - ) - .run(); -} - -#[cargo_test] -fn rename_with_dash() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "qwerty" - version = "0.1.0" - - [dependencies] - foo-bar = { path = 'a', package = 'a' } - "#, - ) - .file("src/lib.rs", "extern crate foo_bar;") - .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("build").run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/replace.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/replace.rs deleted file mode 100644 index 363e6c54b..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/replace.rs +++ /dev/null @@ -1,1300 +0,0 @@ -//! Tests for `[replace]` table source replacement. - -use cargo_test_support::git; -use cargo_test_support::paths; -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_manifest, project}; - -#[cargo_test] -fn override_simple() { - Package::new("bar", "0.1.0").publish(); - - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = {{ git = '{}' }} - "#, - bar.url() - ), - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[UPDATING] git repository `[..]` -[COMPILING] bar v0.1.0 (file://[..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn override_with_features() { - Package::new("bar", "0.1.0").publish(); - - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = {{ git = '{}', features = ["some_feature"] }} - "#, - bar.url() - ), - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] [..] index -[UPDATING] git repository `[..]` -[WARNING] replacement for `bar` uses the features mechanism. default-features and features \ -will not take effect because the replacement dependency does not support this mechanism -[COMPILING] bar v0.1.0 (file://[..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn override_with_setting_default_features() { - Package::new("bar", "0.1.0").publish(); - - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = {{ git = '{}', default-features = false, features = ["none_default_feature"] }} - "#, - bar.url() - ), - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] [..] index -[UPDATING] git repository `[..]` -[WARNING] replacement for `bar` uses the features mechanism. default-features and features \ -will not take effect because the replacement dependency does not support this mechanism -[COMPILING] bar v0.1.0 (file://[..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn missing_version() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - bar = { git = 'https://example.com' } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - replacements must specify a version to replace, but `[..]bar` does not -", - ) - .run(); -} - -#[cargo_test] -fn invalid_semver_version() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - - [replace] - "bar:*" = { git = 'https://example.com' } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - replacements must specify a valid semver version to replace, but `bar:*` does not -", - ) - .run(); -} - -#[cargo_test] -fn different_version() { - Package::new("bar", "0.2.0").publish(); - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = "0.2.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - replacements cannot specify a version requirement, but found one for [..] -", - ) - .run(); -} - -#[cargo_test] -fn transitive() { - Package::new("bar", "0.1.0").publish(); - Package::new("baz", "0.2.0") - .dep("bar", "0.1.0") - .file("src/lib.rs", "extern crate bar; fn baz() { bar::bar(); }") - .publish(); - - let foo = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - baz = "0.2.0" - - [replace] - "bar:0.1.0" = {{ git = '{}' }} - "#, - foo.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[UPDATING] git repository `[..]` -[DOWNLOADING] crates ... -[DOWNLOADED] baz v0.2.0 (registry [..]) -[COMPILING] bar v0.1.0 (file://[..]) -[COMPILING] baz v0.2.0 -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn persists_across_rebuilds() { - Package::new("bar", "0.1.0").publish(); - - let foo = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = {{ git = '{}' }} - "#, - foo.url() - ), - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[UPDATING] git repository `file://[..]` -[COMPILING] bar v0.1.0 (file://[..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn replace_registry_with_path() { - Package::new("bar", "0.1.0").publish(); - - let _ = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = { path = "../bar" } - "#, - ) - .file( - "src/lib.rs", - "extern crate bar; pub fn foo() { bar::bar(); }", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[COMPILING] bar v0.1.0 ([ROOT][..]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn use_a_spec_to_select() { - Package::new("baz", "0.1.1") - .file("src/lib.rs", "pub fn baz1() {}") - .publish(); - Package::new("baz", "0.2.0").publish(); - Package::new("bar", "0.1.1") - .dep("baz", "0.2") - .file( - "src/lib.rs", - "extern crate baz; pub fn bar() { baz::baz3(); }", - ) - .publish(); - - let foo = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("baz", "0.2.0")) - .file("src/lib.rs", "pub fn baz3() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - baz = "0.1" - - [replace] - "baz:0.2.0" = {{ git = '{}' }} - "#, - foo.url() - ), - ) - .file( - "src/lib.rs", - " - extern crate bar; - extern crate baz; - - pub fn local() { - baz::baz1(); - bar::bar(); - } - ", - ) - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[UPDATING] git repository `[..]` -[DOWNLOADING] crates ... -[DOWNLOADED] [..] -[DOWNLOADED] [..] -[COMPILING] [..] -[COMPILING] [..] -[COMPILING] [..] -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn override_adds_some_deps() { - Package::new("baz", "0.1.1").publish(); - Package::new("bar", "0.1.0").publish(); - - let foo = git::repo(&paths::root().join("override")) - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - baz = "0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - - [replace] - "bar:0.1.0" = {{ git = '{}' }} - "#, - foo.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -[UPDATING] git repository `[..]` -[DOWNLOADING] crates ... -[DOWNLOADED] baz v0.1.1 (registry [..]) -[COMPILING] baz v0.1.1 -[COMPILING] bar v0.1.0 ([..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); - - p.cargo("build").with_stdout("").run(); - - Package::new("baz", "0.1.2").publish(); - p.cargo("update -p") - .arg(&format!("{}#bar", foo.url())) - .with_stderr( - "\ -[UPDATING] git repository `file://[..]` -[UPDATING] `dummy-registry` index -", - ) - .run(); - p.cargo("update -p https://github.com/rust-lang/crates.io-index#bar") - .with_stderr( - "\ -[UPDATING] `dummy-registry` index -", - ) - .run(); - - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn locked_means_locked_yes_no_seriously_i_mean_locked() { - // this in theory exercises #2041 - Package::new("baz", "0.1.0").publish(); - Package::new("baz", "0.2.0").publish(); - Package::new("bar", "0.1.0").publish(); - - let foo = git::repo(&paths::root().join("override")) - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - baz = "*" - "#, - ) - .file("src/lib.rs", "") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - baz = "0.1" - - [replace] - "bar:0.1.0" = {{ git = '{}' }} - "#, - foo.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - p.cargo("build").with_stdout("").run(); - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn override_wrong_name() { - Package::new("baz", "0.1.0").publish(); - - let foo = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - baz = "0.1" - - [replace] - "baz:0.1.0" = {{ git = '{}' }} - "#, - foo.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -[UPDATING] git repository [..] -[ERROR] failed to get `baz` as a dependency of package `foo v0.0.1 ([..])` - -Caused by: - no matching package for override `[..]baz:0.1.0` found - location searched: file://[..] - version required: =0.1.0 -", - ) - .run(); -} - -#[cargo_test] -fn override_with_nothing() { - Package::new("bar", "0.1.0").publish(); - - let foo = git::repo(&paths::root().join("override")) - .file("src/lib.rs", "") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - - [replace] - "bar:0.1.0" = {{ git = '{}' }} - "#, - foo.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -[UPDATING] git repository [..] -[ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 ([..])` - -Caused by: - failed to load source for dependency `bar` - -Caused by: - Unable to update file://[..] 
- -Caused by: - Could not find Cargo.toml in `[..]` -", - ) - .run(); -} - -#[cargo_test] -fn override_wrong_version() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [replace] - "bar:0.1.0" = { git = 'https://example.com', version = '0.2.0' } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[..]` - -Caused by: - replacements cannot specify a version requirement, but found one for `[..]bar:0.1.0` -", - ) - .run(); -} - -#[cargo_test] -fn multiple_specs() { - Package::new("bar", "0.1.0").publish(); - - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = {{ git = '{0}' }} - - [replace."https://github.com/rust-lang/crates.io-index#bar:0.1.0"] - git = '{0}' - "#, - bar.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -[UPDATING] git repository [..] -[ERROR] failed to get `bar` as a dependency of package `foo v0.0.1 ([..])` - -Caused by: - overlapping replacement specifications found: - - * [..] - * [..] - - both specifications match: bar v0.1.0 -", - ) - .run(); -} - -#[cargo_test] -fn test_override_dep() { - Package::new("bar", "0.1.0").publish(); - - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = {{ git = '{0}' }} - "#, - bar.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("test -p bar") - .with_status(101) - .with_stderr_contains( - "\ -error: There are multiple `bar` packages in your project, and the [..] -Please re-run this command with [..] 
- [..]#bar:0.1.0 - [..]#bar:0.1.0 -", - ) - .run(); -} - -#[cargo_test] -fn update() { - Package::new("bar", "0.1.0").publish(); - - let bar = git::repo(&paths::root().join("override")) - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn bar() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = {{ git = '{0}' }} - "#, - bar.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("generate-lockfile").run(); - p.cargo("update") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] git repository `[..]` -", - ) - .run(); -} - -// foo -> near -> far -// near is overridden with itself -#[cargo_test] -fn no_override_self() { - let deps = git::repo(&paths::root().join("override")) - .file("far/Cargo.toml", &basic_manifest("far", "0.1.0")) - .file("far/src/lib.rs", "") - .file( - "near/Cargo.toml", - r#" - [package] - name = "near" - version = "0.1.0" - authors = [] - - [dependencies] - far = { path = "../far" } - "#, - ) - .file("near/src/lib.rs", "#![no_std] pub extern crate far;") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - near = {{ git = '{0}' }} - - [replace] - "near:0.1.0" = {{ git = '{0}' }} - "#, - deps.url() - ), - ) - .file("src/lib.rs", "#![no_std] pub extern crate near;") - .build(); - - p.cargo("build --verbose").run(); -} - -#[cargo_test] -fn override_an_override() { - Package::new("chrono", "0.2.0") - .dep("serde", "< 0.9") - .publish(); - Package::new("serde", "0.7.0") - .file("src/lib.rs", "pub fn serde07() {}") - .publish(); - Package::new("serde", "0.8.0") - .file("src/lib.rs", "pub fn serde08() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - chrono = "0.2" - serde = "0.8" - - [replace] - "chrono:0.2.0" = { path = "chrono" } - "serde:0.8.0" = { path = "serde" } - "#, - ) - .file( - "Cargo.lock", - r#" - [[package]] - name = "foo" - version = "0.0.1" - dependencies = [ - "chrono 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - ] - - [[package]] - name = "chrono" - version = "0.2.0" - source = "registry+https://github.com/rust-lang/crates.io-index" - replace = "chrono 0.2.0" - - [[package]] - name = "chrono" - version = "0.2.0" - dependencies = [ - "serde 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - ] - - [[package]] - name = "serde" - version = "0.7.0" - source = "registry+https://github.com/rust-lang/crates.io-index" - - [[package]] - name = "serde" - version = "0.8.0" - source = "registry+https://github.com/rust-lang/crates.io-index" - replace = "serde 0.8.0" - - [[package]] - name = "serde" - version = "0.8.0" - "#, - ) - .file( - "src/lib.rs", - " - extern crate chrono; - extern crate serde; - - pub fn foo() { - chrono::chrono(); - serde::serde08_override(); - } - ", - ) - .file( - "chrono/Cargo.toml", - r#" - [package] - name = "chrono" - version = "0.2.0" - authors = [] - - [dependencies] - serde = "< 0.9" - "#, - ) - .file( - "chrono/src/lib.rs", - " - extern crate serde; - pub fn chrono() { - serde::serde07(); - } - ", - ) - .file("serde/Cargo.toml", &basic_manifest("serde", "0.8.0")) - .file("serde/src/lib.rs", "pub fn serde08_override() {}") 
- .build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn overriding_nonexistent_no_spurious() { - Package::new("bar", "0.1.0").dep("baz", "0.1").publish(); - Package::new("baz", "0.1.0").publish(); - - let bar = git::repo(&paths::root().join("override")) - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - baz = { path = "baz" } - "#, - ) - .file("src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = {{ git = '{url}' }} - "baz:0.1.0" = {{ git = '{url}' }} - "#, - url = bar.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("build") - .with_stderr( - "\ -[WARNING] package replacement is not used: [..]baz:0.1.0 -[FINISHED] [..] -", - ) - .with_stdout("") - .run(); -} - -#[cargo_test] -fn no_warnings_when_replace_is_used_in_another_workspace_member() { - Package::new("bar", "0.1.0").publish(); - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = [ "first_crate", "second_crate"] - - [replace] - "bar:0.1.0" = { path = "local_bar" } - "#, - ) - .file( - "first_crate/Cargo.toml", - r#" - [package] - name = "first_crate" - version = "0.1.0" - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("first_crate/src/lib.rs", "") - .file( - "second_crate/Cargo.toml", - &basic_manifest("second_crate", "0.1.0"), - ) - .file("second_crate/src/lib.rs", "") - .file("local_bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("local_bar/src/lib.rs", "") - .build(); - - p.cargo("build") - .cwd("first_crate") - .with_stdout("") - .with_stderr( - "\ -[UPDATING] `[..]` index -[COMPILING] bar v0.1.0 ([..]) -[COMPILING] first_crate v0.1.0 ([..]) -[FINISHED] [..]", - ) - .run(); - - p.cargo("build") - .cwd("second_crate") - .with_stdout("") - .with_stderr( - "\ -[COMPILING] second_crate v0.1.0 ([..]) -[FINISHED] [..]", - ) - .run(); -} - -#[cargo_test] -fn replace_to_path_dep() { - Package::new("bar", "0.1.0").dep("baz", "0.1").publish(); - Package::new("baz", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1.0" - - [replace] - "bar:0.1.0" = { path = "bar" } - "#, - ) - .file("src/lib.rs", "extern crate bar;") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - baz = { path = "baz" } - "#, - ) - .file( - "bar/src/lib.rs", - "extern crate baz; pub fn bar() { baz::baz(); }", - ) - .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("bar/baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn override_with_default_feature() { - Package::new("another", "0.1.0").publish(); - Package::new("another", "0.1.1").dep("bar", "0.1").publish(); - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = "bar", default-features = false } - another = "0.1" - another2 = { path = "another2" } - - [replace] - 'bar:0.1.0' = { path = "bar" } - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() { 
bar::bar(); }") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [features] - default = [] - "#, - ) - .file( - "bar/src/lib.rs", - r#" - #[cfg(feature = "default")] - pub fn bar() {} - "#, - ) - .file( - "another2/Cargo.toml", - r#" - [package] - name = "another2" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { version = "0.1", default-features = false } - "#, - ) - .file("another2/src/lib.rs", "") - .build(); - - p.cargo("run").run(); -} - -#[cargo_test] -fn override_plus_dep() { - Package::new("bar", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "0.1" - - [replace] - 'bar:0.1.0' = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - foo = { path = ".." } - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains("error: cyclic package dependency: [..]") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/required_features.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/required_features.rs deleted file mode 100644 index 04d9aa646..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/required_features.rs +++ /dev/null @@ -1,1290 +0,0 @@ -//! Tests for targets with `required-features`. - -use cargo_test_support::install::{ - assert_has_installed_exe, assert_has_not_installed_exe, cargo_home, -}; -use cargo_test_support::is_nightly; -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::project; - -#[cargo_test] -fn build_bin_default_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a"] - a = [] - - [[bin]] - name = "foo" - required-features = ["a"] - "#, - ) - .file( - "src/main.rs", - r#" - extern crate foo; - - #[cfg(feature = "a")] - fn test() { - foo::foo(); - } - - fn main() {} - "#, - ) - .file("src/lib.rs", r#"#[cfg(feature = "a")] pub fn foo() {}"#) - .build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - - p.cargo("build --no-default-features").run(); - - p.cargo("build --bin=foo").run(); - assert!(p.bin("foo").is_file()); - - p.cargo("build --bin=foo --no-default-features") - .with_status(101) - .with_stderr( - "\ -error: target `foo` in package `foo` requires the features: `a` -Consider enabling them by passing, e.g., `--features=\"a\"` -", - ) - .run(); -} - -#[cargo_test] -fn build_bin_arg_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - a = [] - - [[bin]] - name = "foo" - required-features = ["a"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build --features a").run(); - assert!(p.bin("foo").is_file()); -} - -#[cargo_test] -fn build_bin_multiple_required_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a", "b"] - a = [] - b = ["a"] - c = [] - - [[bin]] - name = "foo_1" - path = "src/foo_1.rs" - required-features = ["b", "c"] - - [[bin]] - name = "foo_2" - path = "src/foo_2.rs" - required-features = ["a"] - "#, - ) - .file("src/foo_1.rs", "fn main() {}") - .file("src/foo_2.rs", "fn main() {}") 
- .build(); - - p.cargo("build").run(); - - assert!(!p.bin("foo_1").is_file()); - assert!(p.bin("foo_2").is_file()); - - p.cargo("build --features c").run(); - - assert!(p.bin("foo_1").is_file()); - assert!(p.bin("foo_2").is_file()); - - p.cargo("build --no-default-features").run(); -} - -#[cargo_test] -fn build_example_default_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a"] - a = [] - - [[example]] - name = "foo" - required-features = ["a"] - "#, - ) - .file("examples/foo.rs", "fn main() {}") - .build(); - - p.cargo("build --example=foo").run(); - assert!(p.bin("examples/foo").is_file()); - - p.cargo("build --example=foo --no-default-features") - .with_status(101) - .with_stderr( - "\ -error: target `foo` in package `foo` requires the features: `a` -Consider enabling them by passing, e.g., `--features=\"a\"` -", - ) - .run(); -} - -#[cargo_test] -fn build_example_arg_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - a = [] - - [[example]] - name = "foo" - required-features = ["a"] - "#, - ) - .file("examples/foo.rs", "fn main() {}") - .build(); - - p.cargo("build --example=foo --features a").run(); - assert!(p.bin("examples/foo").is_file()); -} - -#[cargo_test] -fn build_example_multiple_required_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a", "b"] - a = [] - b = ["a"] - c = [] - - [[example]] - name = "foo_1" - required-features = ["b", "c"] - - [[example]] - name = "foo_2" - required-features = ["a"] - "#, - ) - .file("examples/foo_1.rs", "fn main() {}") - .file("examples/foo_2.rs", "fn main() {}") - .build(); - - p.cargo("build --example=foo_1") - .with_status(101) - .with_stderr( - "\ -error: target `foo_1` in package `foo` requires the features: `b`, `c` -Consider enabling them by passing, e.g., `--features=\"b c\"` -", - ) - .run(); - p.cargo("build --example=foo_2").run(); - - assert!(!p.bin("examples/foo_1").is_file()); - assert!(p.bin("examples/foo_2").is_file()); - - p.cargo("build --example=foo_1 --features c").run(); - p.cargo("build --example=foo_2 --features c").run(); - - assert!(p.bin("examples/foo_1").is_file()); - assert!(p.bin("examples/foo_2").is_file()); - - p.cargo("build --example=foo_1 --no-default-features") - .with_status(101) - .with_stderr( - "\ -error: target `foo_1` in package `foo` requires the features: `b`, `c` -Consider enabling them by passing, e.g., `--features=\"b c\"` -", - ) - .run(); - p.cargo("build --example=foo_2 --no-default-features") - .with_status(101) - .with_stderr( - "\ -error: target `foo_2` in package `foo` requires the features: `a` -Consider enabling them by passing, e.g., `--features=\"a\"` -", - ) - .run(); -} - -#[cargo_test] -fn test_default_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a"] - a = [] - - [[test]] - name = "foo" - required-features = ["a"] - "#, - ) - .file("tests/foo.rs", "#[test]\nfn test() {}") - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test test ... 
ok") - .run(); - - p.cargo("test --no-default-features") - .with_stderr("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]") - .with_stdout("") - .run(); - - p.cargo("test --test=foo") - .with_stderr( - "\ -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test test ... ok") - .run(); - - p.cargo("test --test=foo --no-default-features") - .with_status(101) - .with_stderr( - "\ -error: target `foo` in package `foo` requires the features: `a` -Consider enabling them by passing, e.g., `--features=\"a\"` -", - ) - .run(); -} - -#[cargo_test] -fn test_arg_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - a = [] - - [[test]] - name = "foo" - required-features = ["a"] - "#, - ) - .file("tests/foo.rs", "#[test]\nfn test() {}") - .build(); - - p.cargo("test --features a") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test test ... ok") - .run(); -} - -#[cargo_test] -fn test_multiple_required_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a", "b"] - a = [] - b = ["a"] - c = [] - - [[test]] - name = "foo_1" - required-features = ["b", "c"] - - [[test]] - name = "foo_2" - required-features = ["a"] - "#, - ) - .file("tests/foo_1.rs", "#[test]\nfn test() {}") - .file("tests/foo_2.rs", "#[test]\nfn test() {}") - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo_2-[..][EXE])", - ) - .with_stdout_contains("test test ... ok") - .run(); - - p.cargo("test --features c") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo_1-[..][EXE]) -[RUNNING] [..] (target/debug/deps/foo_2-[..][EXE])", - ) - .with_stdout_contains_n("test test ... ok", 2) - .run(); - - p.cargo("test --no-default-features") - .with_stderr("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]") - .with_stdout("") - .run(); -} - -#[cargo_test] -fn bench_default_features() { - if !is_nightly() { - // #[bench] is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a"] - a = [] - - [[bench]] - name = "foo" - required-features = ["a"] - "#, - ) - .file( - "benches/foo.rs", - r#" - #![feature(test)] - extern crate test; - - #[bench] - fn bench(_: &mut test::Bencher) { - } - "#, - ) - .build(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test bench ... bench: [..]") - .run(); - - p.cargo("bench --no-default-features") - .with_stderr("[FINISHED] bench [optimized] target(s) in [..]".to_string()) - .with_stdout("") - .run(); - - p.cargo("bench --bench=foo") - .with_stderr( - "\ -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test bench ... 
bench: [..]") - .run(); - - p.cargo("bench --bench=foo --no-default-features") - .with_status(101) - .with_stderr( - "\ -error: target `foo` in package `foo` requires the features: `a` -Consider enabling them by passing, e.g., `--features=\"a\"` -", - ) - .run(); -} - -#[cargo_test] -fn bench_arg_features() { - if !is_nightly() { - // #[bench] is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - a = [] - - [[bench]] - name = "foo" - required-features = ["a"] - "#, - ) - .file( - "benches/foo.rs", - r#" - #![feature(test)] - extern crate test; - - #[bench] - fn bench(_: &mut test::Bencher) { - } - "#, - ) - .build(); - - p.cargo("bench --features a") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test bench ... bench: [..]") - .run(); -} - -#[cargo_test] -fn bench_multiple_required_features() { - if !is_nightly() { - // #[bench] is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a", "b"] - a = [] - b = ["a"] - c = [] - - [[bench]] - name = "foo_1" - required-features = ["b", "c"] - - [[bench]] - name = "foo_2" - required-features = ["a"] - "#, - ) - .file( - "benches/foo_1.rs", - r#" - #![feature(test)] - extern crate test; - - #[bench] - fn bench(_: &mut test::Bencher) { - } - "#, - ) - .file( - "benches/foo_2.rs", - r#" - #![feature(test)] - extern crate test; - - #[bench] - fn bench(_: &mut test::Bencher) { - } - "#, - ) - .build(); - - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo_2-[..][EXE])", - ) - .with_stdout_contains("test bench ... bench: [..]") - .run(); - - p.cargo("bench --features c") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo_1-[..][EXE]) -[RUNNING] [..] (target/release/deps/foo_2-[..][EXE])", - ) - .with_stdout_contains_n("test bench ... bench: [..]", 2) - .run(); - - p.cargo("bench --no-default-features") - .with_stderr("[FINISHED] bench [optimized] target(s) in [..]") - .with_stdout("") - .run(); -} - -#[cargo_test] -fn install_default_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a"] - a = [] - - [[bin]] - name = "foo" - required-features = ["a"] - - [[example]] - name = "foo" - required-features = ["a"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("examples/foo.rs", "fn main() {}") - .build(); - - p.cargo("install --path .").run(); - assert_has_installed_exe(cargo_home(), "foo"); - p.cargo("uninstall foo").run(); - - p.cargo("install --path . --no-default-features") - .with_status(101) - .with_stderr( - "\ -[INSTALLING] foo v0.0.1 ([..]) -[FINISHED] release [optimized] target(s) in [..] -[ERROR] no binaries are available for install using the selected features -", - ) - .run(); - assert_has_not_installed_exe(cargo_home(), "foo"); - - p.cargo("install --path . --bin=foo").run(); - assert_has_installed_exe(cargo_home(), "foo"); - p.cargo("uninstall foo").run(); - - p.cargo("install --path . 
--bin=foo --no-default-features") - .with_status(101) - .with_stderr( - "\ -[INSTALLING] foo v0.0.1 ([..]) -[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \ - `[..]target` - -Caused by: - target `foo` in package `foo` requires the features: `a` - Consider enabling them by passing, e.g., `--features=\"a\"` -", - ) - .run(); - assert_has_not_installed_exe(cargo_home(), "foo"); - - p.cargo("install --path . --example=foo").run(); - assert_has_installed_exe(cargo_home(), "foo"); - p.cargo("uninstall foo").run(); - - p.cargo("install --path . --example=foo --no-default-features") - .with_status(101) - .with_stderr( - "\ -[INSTALLING] foo v0.0.1 ([..]) -[ERROR] failed to compile `foo v0.0.1 ([..])`, intermediate artifacts can be found at \ - `[..]target` - -Caused by: - target `foo` in package `foo` requires the features: `a` - Consider enabling them by passing, e.g., `--features=\"a\"` -", - ) - .run(); - assert_has_not_installed_exe(cargo_home(), "foo"); -} - -#[cargo_test] -fn install_arg_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - a = [] - - [[bin]] - name = "foo" - required-features = ["a"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("install --features a").run(); - assert_has_installed_exe(cargo_home(), "foo"); - p.cargo("uninstall foo").run(); -} - -#[cargo_test] -fn install_multiple_required_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a", "b"] - a = [] - b = ["a"] - c = [] - - [[bin]] - name = "foo_1" - path = "src/foo_1.rs" - required-features = ["b", "c"] - - [[bin]] - name = "foo_2" - path = "src/foo_2.rs" - required-features = ["a"] - "#, - ) - .file("src/foo_1.rs", "fn main() {}") - .file("src/foo_2.rs", "fn main() {}") - .build(); - - p.cargo("install --path .").run(); - assert_has_not_installed_exe(cargo_home(), "foo_1"); - assert_has_installed_exe(cargo_home(), "foo_2"); - p.cargo("uninstall foo").run(); - - p.cargo("install --path . --features c").run(); - assert_has_installed_exe(cargo_home(), "foo_1"); - assert_has_installed_exe(cargo_home(), "foo_2"); - p.cargo("uninstall foo").run(); - - p.cargo("install --path . --no-default-features") - .with_status(101) - .with_stderr( - "\ -[INSTALLING] foo v0.0.1 ([..]) -[FINISHED] release [optimized] target(s) in [..] 
-[ERROR] no binaries are available for install using the selected features -", - ) - .run(); - assert_has_not_installed_exe(cargo_home(), "foo_1"); - assert_has_not_installed_exe(cargo_home(), "foo_2"); -} - -#[cargo_test] -fn dep_feature_in_toml() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = "bar", features = ["a"] } - - [[bin]] - name = "foo" - required-features = ["bar/a"] - - [[example]] - name = "foo" - required-features = ["bar/a"] - - [[test]] - name = "foo" - required-features = ["bar/a"] - - [[bench]] - name = "foo" - required-features = ["bar/a"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("examples/foo.rs", "fn main() {}") - .file("tests/foo.rs", "#[test]\nfn test() {}") - .file( - "benches/foo.rs", - r#" - #![feature(test)] - extern crate test; - - #[bench] - fn bench(_: &mut test::Bencher) { - } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.0.1" - authors = [] - - [features] - a = [] - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - // bin - p.cargo("build --bin=foo").run(); - assert!(p.bin("foo").is_file()); - - // example - p.cargo("build --example=foo").run(); - assert!(p.bin("examples/foo").is_file()); - - // test - p.cargo("test --test=foo") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test test ... ok") - .run(); - - // bench - if is_nightly() { - p.cargo("bench --bench=foo") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test bench ... 
bench: [..]") - .run(); - } - - // install - p.cargo("install").run(); - assert_has_installed_exe(cargo_home(), "foo"); - p.cargo("uninstall foo").run(); -} - -#[cargo_test] -fn dep_feature_in_cmd_line() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = "bar" } - - [[bin]] - name = "foo" - required-features = ["bar/a"] - - [[example]] - name = "foo" - required-features = ["bar/a"] - - [[test]] - name = "foo" - required-features = ["bar/a"] - - [[bench]] - name = "foo" - required-features = ["bar/a"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("examples/foo.rs", "fn main() {}") - .file( - "tests/foo.rs", - r#" - #[test] - fn bin_is_built() { - let s = format!("target/debug/foo{}", std::env::consts::EXE_SUFFIX); - let p = std::path::Path::new(&s); - assert!(p.exists(), "foo does not exist"); - } - "#, - ) - .file( - "benches/foo.rs", - r#" - #![feature(test)] - extern crate test; - - #[bench] - fn bench(_: &mut test::Bencher) { - } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.0.1" - authors = [] - - [features] - a = [] - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - // This is a no-op - p.cargo("build").with_stderr("[FINISHED] dev [..]").run(); - assert!(!p.bin("foo").is_file()); - - // bin - p.cargo("build --bin=foo") - .with_status(101) - .with_stderr( - "\ -error: target `foo` in package `foo` requires the features: `bar/a` -Consider enabling them by passing, e.g., `--features=\"bar/a\"` -", - ) - .run(); - - p.cargo("build --bin=foo --features bar/a").run(); - assert!(p.bin("foo").is_file()); - - // example - p.cargo("build --example=foo") - .with_status(101) - .with_stderr( - "\ -error: target `foo` in package `foo` requires the features: `bar/a` -Consider enabling them by passing, e.g., `--features=\"bar/a\"` -", - ) - .run(); - - p.cargo("build --example=foo --features bar/a").run(); - assert!(p.bin("examples/foo").is_file()); - - // test - // This is a no-op, since no tests are enabled - p.cargo("test") - .with_stderr("[FINISHED] test [unoptimized + debuginfo] target(s) in [..]") - .with_stdout("") - .run(); - - // Delete the target directory so this can check if the main.rs gets built. - p.build_dir().rm_rf(); - p.cargo("test --test=foo --features bar/a") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test bin_is_built ... ok") - .run(); - - // bench - if is_nightly() { - p.cargo("bench") - .with_stderr("[FINISHED] bench [optimized] target(s) in [..]") - .with_stdout("") - .run(); - - p.cargo("bench --bench=foo --features bar/a") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test bench ... bench: [..]") - .run(); - } - - // install - p.cargo("install --path .") - .with_status(101) - .with_stderr( - "\ -[INSTALLING] foo v0.0.1 ([..]) -[FINISHED] release [optimized] target(s) in [..] 
-[ERROR] no binaries are available for install using the selected features -", - ) - .run(); - assert_has_not_installed_exe(cargo_home(), "foo"); - - p.cargo("install --features bar/a").run(); - assert_has_installed_exe(cargo_home(), "foo"); - p.cargo("uninstall foo").run(); -} - -#[cargo_test] -fn test_skips_compiling_bin_with_missing_required_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - a = [] - - [[bin]] - name = "bin_foo" - path = "src/bin/foo.rs" - required-features = ["a"] - "#, - ) - .file("src/bin/foo.rs", "extern crate bar; fn main() {}") - .file("tests/foo.rs", "") - .file("benches/foo.rs", "") - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("running 0 tests") - .run(); - - p.cargo("test --features a -j 1") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -error[E0463]: can't find crate for `bar`", - ) - .run(); - - if is_nightly() { - p.cargo("bench") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] bench [optimized] target(s) in [..] -[RUNNING] [..] (target/release/deps/foo-[..][EXE])", - ) - .with_stdout_contains("running 0 tests") - .run(); - - p.cargo("bench --features a -j 1") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -error[E0463]: can't find crate for `bar`", - ) - .run(); - } -} - -#[cargo_test] -fn run_default() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = [] - a = [] - - [[bin]] - name = "foo" - required-features = ["a"] - "#, - ) - .file("src/lib.rs", "") - .file("src/main.rs", "extern crate foo; fn main() {}") - .build(); - - p.cargo("run") - .with_status(101) - .with_stderr( - "\ -error: target `foo` in package `foo` requires the features: `a` -Consider enabling them by passing, e.g., `--features=\"a\"` -", - ) - .run(); - - p.cargo("run --features a").run(); -} - -#[cargo_test] -fn run_default_multiple_required_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - default = ["a"] - a = [] - b = [] - - [[bin]] - name = "foo1" - path = "src/foo1.rs" - required-features = ["a"] - - [[bin]] - name = "foo3" - path = "src/foo3.rs" - required-features = ["b"] - - [[bin]] - name = "foo2" - path = "src/foo2.rs" - required-features = ["b"] - "#, - ) - .file("src/lib.rs", "") - .file("src/foo1.rs", "extern crate foo; fn main() {}") - .file("src/foo3.rs", "extern crate foo; fn main() {}") - .file("src/foo2.rs", "extern crate foo; fn main() {}") - .build(); - - p.cargo("run") - .with_status(101) - .with_stderr( - "\ -error: `cargo run` could not determine which binary to run[..] -available binaries: foo1, foo2, foo3", - ) - .run(); -} - -#[cargo_test] -fn renamed_required_features() { - // Test that required-features uses renamed package feature names. 
- let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [[bin]] - name = "x" - required-features = ["a1/f1"] - - [dependencies] - a1 = {path="a1", package="a"} - a2 = {path="a2", package="a"} - "#, - ) - .file( - "src/bin/x.rs", - r#" - fn main() { - a1::f(); - a2::f(); - } - "#, - ) - .file( - "a1/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [features] - f1 = [] - "#, - ) - .file( - "a1/src/lib.rs", - r#" - pub fn f() { - if cfg!(feature="f1") { - println!("a1 f1"); - } - } - "#, - ) - .file( - "a2/Cargo.toml", - r#" - [package] - name = "a" - version = "0.2.0" - - [features] - f2 = [] - "#, - ) - .file( - "a2/src/lib.rs", - r#" - pub fn f() { - if cfg!(feature="f2") { - println!("a2 f2"); - } - } - "#, - ) - .build(); - - p.cargo("run") - .with_status(101) - .with_stderr( - "\ -[ERROR] target `x` in package `foo` requires the features: `a1/f1` -Consider enabling them by passing, e.g., `--features=\"a1/f1\"` -", - ) - .run(); - - p.cargo("build --features a1/f1").run(); - p.rename_run("x", "x_with_f1").with_stdout("a1 f1").run(); - - p.cargo("build --features a1/f1,a2/f2").run(); - p.rename_run("x", "x_with_f1_f2") - .with_stdout("a1 f1\na2 f2") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/run.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/run.rs deleted file mode 100644 index ee7c138da..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/run.rs +++ /dev/null @@ -1,1434 +0,0 @@ -//! Tests for the `cargo run` command. - -use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, project, Project}; -use cargo_util::paths::dylib_path_envvar; - -#[cargo_test] -fn simple() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - p.cargo("run") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/foo[EXE]`", - ) - .with_stdout("hello") - .run(); - assert!(p.bin("foo").is_file()); -} - -#[cargo_test] -fn quiet_arg() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - p.cargo("run -q").with_stderr("").with_stdout("hello").run(); - - p.cargo("run --quiet") - .with_stderr("") - .with_stdout("hello") - .run(); -} - -#[cargo_test] -fn quiet_arg_and_verbose_arg() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - p.cargo("run -q -v") - .with_status(101) - .with_stderr("[ERROR] cannot set both --verbose and --quiet") - .run(); -} - -#[cargo_test] -fn quiet_arg_and_verbose_config() { - let p = project() - .file( - ".cargo/config", - r#" - [term] - verbose = true - "#, - ) - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - p.cargo("run -q").with_stderr("").with_stdout("hello").run(); -} - -#[cargo_test] -fn verbose_arg_and_quiet_config() { - let p = project() - .file( - ".cargo/config", - r#" - [term] - quiet = true - "#, - ) - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - p.cargo("run -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-[RUNNING] `target/debug/foo[EXE]`", - ) - .with_stdout("hello") - .run(); -} - -#[cargo_test] -fn quiet_config_and_verbose_config() { - let p = project() - .file( - ".cargo/config", - r#" - [term] - verbose = true - quiet = true - "#, - ) - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - p.cargo("run") - .with_status(101) - .with_stderr("[ERROR] cannot set both `term.verbose` and `term.quiet`") - .run(); -} - -#[cargo_test] -fn simple_with_args() { - let p = project() - .file( - "src/main.rs", - r#" - fn main() { - assert_eq!(std::env::args().nth(1).unwrap(), "hello"); - assert_eq!(std::env::args().nth(2).unwrap(), "world"); - } - "#, - ) - .build(); - - p.cargo("run hello world").run(); -} - -#[cfg(unix)] -#[cargo_test] -fn simple_with_non_utf8_args() { - use std::os::unix::ffi::OsStrExt; - - let p = project() - .file( - "src/main.rs", - r#" - use std::ffi::OsStr; - use std::os::unix::ffi::OsStrExt; - - fn main() { - assert_eq!(std::env::args_os().nth(1).unwrap(), OsStr::from_bytes(b"hello")); - assert_eq!(std::env::args_os().nth(2).unwrap(), OsStr::from_bytes(b"ab\xffcd")); - } - "#, - ) - .build(); - - p.cargo("run") - .arg("hello") - .arg(std::ffi::OsStr::from_bytes(b"ab\xFFcd")) - .run(); -} - -#[cargo_test] -fn exit_code() { - let p = project() - .file("src/main.rs", "fn main() { std::process::exit(2); }") - .build(); - - let mut output = String::from( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target[..]` -", - ); - if !cfg!(unix) { - output.push_str( - "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit [..]: 2)", - ); - } - p.cargo("run").with_status(2).with_stderr(output).run(); -} - -#[cargo_test] -fn exit_code_verbose() { - let p = project() - .file("src/main.rs", "fn main() { std::process::exit(2); }") - .build(); - - let mut output = String::from( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target[..]` -", - ); - if !cfg!(unix) { - output.push_str( - "[ERROR] process didn't exit successfully: `target[..]foo[..]` (exit [..]: 2)", - ); - } - - p.cargo("run -v").with_status(2).with_stderr(output).run(); -} - -#[cargo_test] -fn no_main_file() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("run") - .with_status(101) - .with_stderr( - "[ERROR] a bin target must be available \ - for `cargo run`\n", - ) - .run(); -} - -#[cargo_test] -fn too_many_bins() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", "") - .file("src/bin/b.rs", "") - .build(); - - // Using [..] here because the order is not stable - p.cargo("run") - .with_status(101) - .with_stderr( - "[ERROR] `cargo run` could not determine which binary to run. \ - Use the `--bin` option to specify a binary, or the \ - `default-run` manifest key.\ - \navailable binaries: [..]\n", - ) - .run(); -} - -#[cargo_test] -fn specify_name() { - let p = project() - .file("src/lib.rs", "") - .file( - "src/bin/a.rs", - r#" - #[allow(unused_extern_crates)] - extern crate foo; - fn main() { println!("hello a.rs"); } - "#, - ) - .file( - "src/bin/b.rs", - r#" - #[allow(unused_extern_crates)] - extern crate foo; - fn main() { println!("hello b.rs"); } - "#, - ) - .build(); - - p.cargo("run --bin a -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc [..] src/lib.rs [..]` -[RUNNING] `rustc [..] src/bin/a.rs [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-[RUNNING] `target/debug/a[EXE]`", - ) - .with_stdout("hello a.rs") - .run(); - - p.cargo("run --bin b -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] src/bin/b.rs [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/b[EXE]`", - ) - .with_stdout("hello b.rs") - .run(); -} - -#[cargo_test] -fn specify_default_run() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - default-run = "a" - "#, - ) - .file("src/lib.rs", "") - .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#) - .file("src/bin/b.rs", r#"fn main() { println!("hello B"); }"#) - .build(); - - p.cargo("run").with_stdout("hello A").run(); - p.cargo("run --bin a").with_stdout("hello A").run(); - p.cargo("run --bin b").with_stdout("hello B").run(); -} - -#[cargo_test] -fn bogus_default_run() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - default-run = "b" - "#, - ) - .file("src/lib.rs", "") - .file("src/bin/a.rs", r#"fn main() { println!("hello A"); }"#) - .build(); - - p.cargo("run") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` - -Caused by: - default-run target `b` not found - - Did you mean `a`? -", - ) - .run(); -} - -#[cargo_test] -fn run_example() { - let p = project() - .file("src/lib.rs", "") - .file("examples/a.rs", r#"fn main() { println!("example"); }"#) - .file("src/bin/a.rs", r#"fn main() { println!("bin"); }"#) - .build(); - - p.cargo("run --example a") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/examples/a[EXE]`", - ) - .with_stdout("example") - .run(); -} - -#[cargo_test] -fn run_library_example() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - [[example]] - name = "bar" - crate_type = ["lib"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/bar.rs", "fn foo() {}") - .build(); - - p.cargo("run --example bar") - .with_status(101) - .with_stderr("[ERROR] example target `bar` is a library and cannot be executed") - .run(); -} - -#[cargo_test] -fn run_bin_example() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - [[example]] - name = "bar" - crate_type = ["bin"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/bar.rs", r#"fn main() { println!("example"); }"#) - .build(); - - p.cargo("run --example bar") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-[RUNNING] `target/debug/examples/bar[EXE]`", - ) - .with_stdout("example") - .run(); -} - -fn autodiscover_examples_project(rust_edition: &str, autoexamples: Option) -> Project { - let autoexamples = match autoexamples { - None => "".to_string(), - Some(bool) => format!("autoexamples = {}", bool), - }; - project() - .file( - "Cargo.toml", - &format!( - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - edition = "{rust_edition}" - {autoexamples} - - [features] - magic = [] - - [[example]] - name = "do_magic" - required-features = ["magic"] - "#, - rust_edition = rust_edition, - autoexamples = autoexamples - ), - ) - .file("examples/a.rs", r#"fn main() { println!("example"); }"#) - .file( - "examples/do_magic.rs", - r#" - fn main() { println!("magic example"); } - "#, - ) - .build() -} - -#[cargo_test] -fn run_example_autodiscover_2015() { - let p = autodiscover_examples_project("2015", None); - p.cargo("run --example a") - .with_status(101) - .with_stderr( - "warning: \ -An explicit [[example]] section is specified in Cargo.toml which currently -disables Cargo from automatically inferring other example targets. -This inference behavior will change in the Rust 2018 edition and the following -files will be included as a example target: - -* [..]a.rs - -This is likely to break cargo build or cargo test as these files may not be -ready to be compiled as a example target today. You can future-proof yourself -and disable this warning by adding `autoexamples = false` to your [package] -section. You may also move the files to a location where Cargo would not -automatically infer them to be a target, such as in subfolders. - -For more information on this warning you can consult -https://github.com/rust-lang/cargo/issues/5330 -error: no example target named `a` -", - ) - .run(); -} - -#[cargo_test] -fn run_example_autodiscover_2015_with_autoexamples_enabled() { - let p = autodiscover_examples_project("2015", Some(true)); - p.cargo("run --example a") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/examples/a[EXE]`", - ) - .with_stdout("example") - .run(); -} - -#[cargo_test] -fn run_example_autodiscover_2015_with_autoexamples_disabled() { - let p = autodiscover_examples_project("2015", Some(false)); - p.cargo("run --example a") - .with_status(101) - .with_stderr("error: no example target named `a`\n") - .run(); -} - -#[cargo_test] -fn run_example_autodiscover_2018() { - let p = autodiscover_examples_project("2018", None); - p.cargo("run --example a") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-[RUNNING] `target/debug/examples/a[EXE]`", - ) - .with_stdout("example") - .run(); -} - -#[cargo_test] -fn autobins_disables() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - autobins = false - "#, - ) - .file("src/lib.rs", "pub mod bin;") - .file("src/bin/mod.rs", "// empty") - .build(); - - p.cargo("run") - .with_status(101) - .with_stderr("[ERROR] a bin target must be available for `cargo run`") - .run(); -} - -#[cargo_test] -fn run_bins() { - let p = project() - .file("src/lib.rs", "") - .file("examples/a.rs", r#"fn main() { println!("example"); }"#) - .file("src/bin/a.rs", r#"fn main() { println!("bin"); }"#) - .build(); - - p.cargo("run --bins") - .with_status(1) - .with_stderr_contains( - "error: Found argument '--bins' which wasn't expected, or isn't valid in this context", - ) - .run(); -} - -#[cargo_test] -fn run_with_filename() { - let p = project() - .file("src/lib.rs", "") - .file( - "src/bin/a.rs", - r#" - extern crate foo; - fn main() { println!("hello a.rs"); } - "#, - ) - .file("examples/a.rs", r#"fn main() { println!("example"); }"#) - .build(); - - p.cargo("run --bin bin.rs") - .with_status(101) - .with_stderr("[ERROR] no bin target named `bin.rs`") - .run(); - - p.cargo("run --bin a.rs") - .with_status(101) - .with_stderr( - "\ -[ERROR] no bin target named `a.rs` - -Did you mean `a`?", - ) - .run(); - - p.cargo("run --example example.rs") - .with_status(101) - .with_stderr("[ERROR] no example target named `example.rs`") - .run(); - - p.cargo("run --example a.rs") - .with_status(101) - .with_stderr( - "\ -[ERROR] no example target named `a.rs` - -Did you mean `a`?", - ) - .run(); -} - -#[cargo_test] -fn either_name_or_example() { - let p = project() - .file("src/bin/a.rs", r#"fn main() { println!("hello a.rs"); }"#) - .file("examples/b.rs", r#"fn main() { println!("hello b.rs"); }"#) - .build(); - - p.cargo("run --bin a --example b") - .with_status(101) - .with_stderr( - "[ERROR] `cargo run` can run at most one \ - executable, but multiple were \ - specified", - ) - .run(); -} - -#[cargo_test] -fn one_bin_multiple_examples() { - let p = project() - .file("src/lib.rs", "") - .file( - "src/bin/main.rs", - r#"fn main() { println!("hello main.rs"); }"#, - ) - .file("examples/a.rs", r#"fn main() { println!("hello a.rs"); }"#) - .file("examples/b.rs", r#"fn main() { println!("hello b.rs"); }"#) - .build(); - - p.cargo("run") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/main[EXE]`", - ) - .with_stdout("hello main.rs") - .run(); -} - -#[cargo_test] -fn example_with_release_flag() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - version = "*" - path = "bar" - "#, - ) - .file( - "examples/a.rs", - r#" - extern crate bar; - - fn main() { - if cfg!(debug_assertions) { - println!("slow1") - } else { - println!("fast1") - } - bar::baz(); - } - "#, - ) - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file( - "bar/src/bar.rs", - r#" - pub fn baz() { - if cfg!(debug_assertions) { - println!("slow2") - } else { - println!("fast2") - } - } - "#, - ) - .build(); - - p.cargo("run -v --release --example a") - .with_stderr( - "\ -[COMPILING] bar v0.5.0 ([CWD]/bar) -[RUNNING] `rustc --crate-name bar bar/src/bar.rs [..]--crate-type lib \ - --emit=[..]link \ - -C opt-level=3[..]\ - -C metadata=[..] 
\ - --out-dir [CWD]/target/release/deps \ - -L dependency=[CWD]/target/release/deps` -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name a examples/a.rs [..]--crate-type bin \ - --emit=[..]link \ - -C opt-level=3[..]\ - -C metadata=[..] \ - --out-dir [CWD]/target/release/examples \ - -L dependency=[CWD]/target/release/deps \ - --extern bar=[CWD]/target/release/deps/libbar-[..].rlib` -[FINISHED] release [optimized] target(s) in [..] -[RUNNING] `target/release/examples/a[EXE]` -", - ) - .with_stdout( - "\ -fast1 -fast2", - ) - .run(); - - p.cargo("run -v --example a") - .with_stderr( - "\ -[COMPILING] bar v0.5.0 ([CWD]/bar) -[RUNNING] `rustc --crate-name bar bar/src/bar.rs [..]--crate-type lib \ - --emit=[..]link[..]\ - -C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [CWD]/target/debug/deps \ - -L dependency=[CWD]/target/debug/deps` -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name a examples/a.rs [..]--crate-type bin \ - --emit=[..]link[..]\ - -C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [CWD]/target/debug/examples \ - -L dependency=[CWD]/target/debug/deps \ - --extern bar=[CWD]/target/debug/deps/libbar-[..].rlib` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/examples/a[EXE]` -", - ) - .with_stdout( - "\ -slow1 -slow2", - ) - .run(); -} - -#[cargo_test] -fn run_dylib_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "bar" - "#, - ) - .file( - "src/main.rs", - r#"extern crate bar; fn main() { bar::bar(); }"#, - ) - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - crate-type = ["dylib"] - "#, - ) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("run hello world").run(); -} - -#[cargo_test] -fn run_with_bin_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies.bar] - path = "bar" - "#, - ) - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [[bin]] - name = "bar" - "#, - ) - .file("bar/src/main.rs", r#"fn main() { println!("bar"); }"#) - .build(); - - p.cargo("run") - .with_stderr( - "\ -[WARNING] foo v0.0.1 ([CWD]) ignoring invalid dependency `bar` which is missing a lib target -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-[RUNNING] `target/debug/foo[EXE]`", - ) - .with_stdout("hello") - .run(); -} - -#[cargo_test] -fn run_with_bin_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies.bar1] - path = "bar1" - [dependencies.bar2] - path = "bar2" - "#, - ) - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .file( - "bar1/Cargo.toml", - r#" - [package] - name = "bar1" - version = "0.0.1" - authors = [] - - [[bin]] - name = "bar1" - "#, - ) - .file("bar1/src/main.rs", r#"fn main() { println!("bar1"); }"#) - .file( - "bar2/Cargo.toml", - r#" - [package] - name = "bar2" - version = "0.0.1" - authors = [] - - [[bin]] - name = "bar2" - "#, - ) - .file("bar2/src/main.rs", r#"fn main() { println!("bar2"); }"#) - .build(); - - p.cargo("run") - .with_stderr( - "\ -[WARNING] foo v0.0.1 ([CWD]) ignoring invalid dependency `bar1` which is missing a lib target -[WARNING] foo v0.0.1 ([CWD]) ignoring invalid dependency `bar2` which is missing a lib target -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/foo[EXE]`", - ) - .with_stdout("hello") - .run(); -} - -#[cargo_test] -fn run_with_bin_dep_in_workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo1", "foo2"] - "#, - ) - .file( - "foo1/Cargo.toml", - r#" - [package] - name = "foo1" - version = "0.0.1" - - [dependencies.bar1] - path = "bar1" - "#, - ) - .file("foo1/src/main.rs", r#"fn main() { println!("hello"); }"#) - .file( - "foo1/bar1/Cargo.toml", - r#" - [package] - name = "bar1" - version = "0.0.1" - authors = [] - - [[bin]] - name = "bar1" - "#, - ) - .file( - "foo1/bar1/src/main.rs", - r#"fn main() { println!("bar1"); }"#, - ) - .file( - "foo2/Cargo.toml", - r#" - [package] - name = "foo2" - version = "0.0.1" - - [dependencies.bar2] - path = "bar2" - "#, - ) - .file("foo2/src/main.rs", r#"fn main() { println!("hello"); }"#) - .file( - "foo2/bar2/Cargo.toml", - r#" - [package] - name = "bar2" - version = "0.0.1" - authors = [] - - [[bin]] - name = "bar2" - "#, - ) - .file( - "foo2/bar2/src/main.rs", - r#"fn main() { println!("bar2"); }"#, - ) - .build(); - - p.cargo("run") - .with_status(101) - .with_stderr( - "\ -[ERROR] `cargo run` could not determine which binary to run[..] -available binaries: bar1, bar2, foo1, foo2", - ) - .run(); - - p.cargo("run --bin foo1") - .with_stderr( - "\ -[WARNING] foo1 v0.0.1 ([CWD]/foo1) ignoring invalid dependency `bar1` which is missing a lib target -[WARNING] foo2 v0.0.1 ([CWD]/foo2) ignoring invalid dependency `bar2` which is missing a lib target -[COMPILING] foo1 v0.0.1 ([CWD]/foo1) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `target/debug/foo1[EXE]`", - ) - .with_stdout("hello") - .run(); -} - -#[cargo_test] -fn release_works() { - let p = project() - .file( - "src/main.rs", - r#" - fn main() { if cfg!(debug_assertions) { panic!() } } - "#, - ) - .build(); - - p.cargo("run --release") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] release [optimized] target(s) in [..] -[RUNNING] `target/release/foo[EXE]` -", - ) - .run(); - assert!(p.release_bin("foo").is_file()); -} - -#[cargo_test] -fn release_short_works() { - let p = project() - .file( - "src/main.rs", - r#" - fn main() { if cfg!(debug_assertions) { panic!() } } - "#, - ) - .build(); - - p.cargo("run -r") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] release [optimized] target(s) in [..] 
-[RUNNING] `target/release/foo[EXE]` -", - ) - .run(); - assert!(p.release_bin("foo").is_file()); -} - -#[cargo_test] -fn run_bin_different_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name = "bar" - "#, - ) - .file("src/bar.rs", "fn main() {}") - .build(); - - p.cargo("run").run(); -} - -#[cargo_test] -fn dashes_are_forwarded() { - let p = project() - .file( - "src/bin/bar.rs", - r#" - fn main() { - let s: Vec = std::env::args().collect(); - assert_eq!(s[1], "--"); - assert_eq!(s[2], "a"); - assert_eq!(s[3], "--"); - assert_eq!(s[4], "b"); - } - "#, - ) - .build(); - - p.cargo("run -- -- a -- b").run(); -} - -#[cargo_test] -fn run_from_executable_folder() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - let cwd = p.root().join("target").join("debug"); - p.cargo("build").run(); - - p.cargo("run") - .cwd(cwd) - .with_stderr( - "[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n\ - [RUNNING] `./foo[EXE]`", - ) - .with_stdout("hello") - .run(); -} - -#[cargo_test] -fn run_with_library_paths() { - let p = project(); - - // Only link search directories within the target output directory are - // propagated through to dylib_path_envvar() (see #3366). - let mut dir1 = p.target_debug_dir(); - dir1.push("foo\\backslash"); - - let mut dir2 = p.target_debug_dir(); - dir2.push("dir=containing=equal=signs"); - - let p = p - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - &format!( - r##" - fn main() {{ - println!(r#"cargo:rustc-link-search=native={}"#); - println!(r#"cargo:rustc-link-search={}"#); - }} - "##, - dir1.display(), - dir2.display() - ), - ) - .file( - "src/main.rs", - &format!( - r##" - fn main() {{ - let search_path = std::env::var_os("{}").unwrap(); - let paths = std::env::split_paths(&search_path).collect::>(); - println!("{{:#?}}", paths); - assert!(paths.contains(&r#"{}"#.into())); - assert!(paths.contains(&r#"{}"#.into())); - }} - "##, - dylib_path_envvar(), - dir1.display(), - dir2.display() - ), - ) - .build(); - - p.cargo("run").run(); -} - -#[cargo_test] -fn library_paths_sorted_alphabetically() { - let p = project(); - - let mut dir1 = p.target_debug_dir(); - dir1.push("zzzzzzz"); - - let mut dir2 = p.target_debug_dir(); - dir2.push("BBBBBBB"); - - let mut dir3 = p.target_debug_dir(); - dir3.push("aaaaaaa"); - - let p = p - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file( - "build.rs", - &format!( - r##" - fn main() {{ - println!(r#"cargo:rustc-link-search=native={}"#); - println!(r#"cargo:rustc-link-search=native={}"#); - println!(r#"cargo:rustc-link-search=native={}"#); - }} - "##, - dir1.display(), - dir2.display(), - dir3.display() - ), - ) - .file( - "src/main.rs", - &format!( - r##" - fn main() {{ - let search_path = std::env::var_os("{}").unwrap(); - let paths = std::env::split_paths(&search_path).collect::>(); - // ASCII case-sensitive sort - assert_eq!("BBBBBBB", paths[0].file_name().unwrap().to_string_lossy()); - assert_eq!("aaaaaaa", paths[1].file_name().unwrap().to_string_lossy()); - assert_eq!("zzzzzzz", paths[2].file_name().unwrap().to_string_lossy()); - }} - "##, - dylib_path_envvar() - ), - ) - .build(); - - p.cargo("run").run(); -} - -#[cargo_test] -fn fail_no_extra_verbose() { - let p = project() - .file("src/main.rs", "fn main() { 
std::process::exit(1); }") - .build(); - - p.cargo("run -q") - .with_status(1) - .with_stdout("") - .with_stderr("") - .run(); -} - -#[cargo_test] -fn run_multiple_packages() { - let p = project() - .no_manifest() - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [workspace] - - [dependencies] - d1 = { path = "d1" } - d2 = { path = "d2" } - d3 = { path = "../d3" } # outside of the workspace - - [[bin]] - name = "foo" - "#, - ) - .file("foo/src/foo.rs", "fn main() { println!(\"foo\"); }") - .file("foo/d1/Cargo.toml", &basic_bin_manifest("d1")) - .file("foo/d1/src/lib.rs", "") - .file("foo/d1/src/main.rs", "fn main() { println!(\"d1\"); }") - .file("foo/d2/Cargo.toml", &basic_bin_manifest("d2")) - .file("foo/d2/src/main.rs", "fn main() { println!(\"d2\"); }") - .file("d3/Cargo.toml", &basic_bin_manifest("d3")) - .file("d3/src/main.rs", "fn main() { println!(\"d2\"); }") - .build(); - - let cargo = || { - let mut process_builder = p.cargo("run"); - process_builder.cwd("foo"); - process_builder - }; - - cargo().arg("-p").arg("d1").with_stdout("d1").run(); - - cargo() - .arg("-p") - .arg("d2") - .arg("--bin") - .arg("d2") - .with_stdout("d2") - .run(); - - cargo().with_stdout("foo").run(); - - cargo().arg("-p").arg("d1").arg("-p").arg("d2") - .with_status(1) - .with_stderr_contains("error: The argument '--package ' was provided more than once, but cannot be used multiple times").run(); - - cargo() - .arg("-p") - .arg("d3") - .with_status(101) - .with_stderr_contains("[ERROR] package(s) `d3` not found in workspace [..]") - .run(); - - cargo() - .arg("-p") - .arg("d*") - .with_status(101) - .with_stderr_contains( - "[ERROR] `cargo run` does not support glob pattern `d*` on package selection", - ) - .run(); -} - -#[cargo_test] -fn explicit_bin_with_args() { - let p = project() - .file( - "src/main.rs", - r#" - fn main() { - assert_eq!(std::env::args().nth(1).unwrap(), "hello"); - assert_eq!(std::env::args().nth(2).unwrap(), "world"); - } - "#, - ) - .build(); - - p.cargo("run --bin foo hello world").run(); -} - -#[cargo_test] -fn run_workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file("a/Cargo.toml", &basic_bin_manifest("a")) - .file("a/src/main.rs", r#"fn main() {println!("run-a");}"#) - .file("b/Cargo.toml", &basic_bin_manifest("b")) - .file("b/src/main.rs", r#"fn main() {println!("run-b");}"#) - .build(); - - p.cargo("run") - .with_status(101) - .with_stderr( - "\ -[ERROR] `cargo run` could not determine which binary to run[..] -available binaries: a, b", - ) - .run(); - p.cargo("run --bin a").with_stdout("run-a").run(); -} - -#[cargo_test] -fn default_run_workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.0.1" - default-run = "a" - "#, - ) - .file("a/src/main.rs", r#"fn main() {println!("run-a");}"#) - .file("b/Cargo.toml", &basic_bin_manifest("b")) - .file("b/src/main.rs", r#"fn main() {println!("run-b");}"#) - .build(); - - p.cargo("run").with_stdout("run-a").run(); -} - -#[cargo_test] -#[cfg(target_os = "macos")] -fn run_link_system_path_macos() { - use cargo_test_support::paths::{self, CargoPathExt}; - use std::fs; - // Check that the default system library path is honored. - // First, build a shared library that will be accessed from - // DYLD_FALLBACK_LIBRARY_PATH. 
- let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - [lib] - crate-type = ["cdylib"] - "#, - ) - .file( - "src/lib.rs", - "#[no_mangle] pub extern fn something_shared() {}", - ) - .build(); - p.cargo("build").run(); - - // This is convoluted. Since this test can't modify things in /usr, - // this needs to dance around to check that things work. - // - // The default DYLD_FALLBACK_LIBRARY_PATH is: - // $(HOME)/lib:/usr/local/lib:/lib:/usr/lib - // - // This will make use of ~/lib in the path, but the default cc link - // path is /usr/lib:/usr/local/lib. So first need to build in one - // location, and then move it to ~/lib. - // - // 1. Build with rustc-link-search pointing to libfoo so the initial - // binary can be linked. - // 2. Move the library to ~/lib - // 3. Run `cargo run` to make sure it can still find the library in - // ~/lib. - // - // This should be equivalent to having the library in /usr/local/lib. - let p2 = project() - .at("bar") - .file("Cargo.toml", &basic_bin_manifest("bar")) - .file( - "src/main.rs", - r#" - extern { - fn something_shared(); - } - fn main() { - unsafe { something_shared(); } - } - "#, - ) - .file( - "build.rs", - &format!( - r#" - fn main() {{ - println!("cargo:rustc-link-lib=foo"); - println!("cargo:rustc-link-search={}"); - }} - "#, - p.target_debug_dir().display() - ), - ) - .build(); - p2.cargo("build").run(); - p2.cargo("test").run(); - - let libdir = paths::home().join("lib"); - fs::create_dir(&libdir).unwrap(); - fs::rename( - p.target_debug_dir().join("libfoo.dylib"), - libdir.join("libfoo.dylib"), - ) - .unwrap(); - p.root().rm_rf(); - const VAR: &str = "DYLD_FALLBACK_LIBRARY_PATH"; - // Reset DYLD_FALLBACK_LIBRARY_PATH so that we don't inherit anything that - // was set by the cargo that invoked the test. - p2.cargo("run").env_remove(VAR).run(); - p2.cargo("test").env_remove(VAR).run(); - // Ensure this still works when DYLD_FALLBACK_LIBRARY_PATH has - // a value set. - p2.cargo("run").env(VAR, &libdir).run(); - p2.cargo("test").env(VAR, &libdir).run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rust_version.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rust_version.rs deleted file mode 100644 index 6f849141f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rust_version.rs +++ /dev/null @@ -1,191 +0,0 @@ -//! Tests for targets with `rust-version`. 
- -use cargo_test_support::{project, registry::Package}; - -#[cargo_test] -fn rust_version_satisfied() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - rust-version = "1.1.1" - [[bin]] - name = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build").run(); - p.cargo("build --ignore-rust-version").run(); -} - -#[cargo_test] -fn rust_version_bad_caret() { - project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - rust-version = "^1.43" - [[bin]] - name = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build() - .cargo("build") - .with_status(101) - .with_stderr( - "error: failed to parse manifest at `[..]`\n\n\ - Caused by:\n `rust-version` must be a value like \"1.32\"", - ) - .run(); -} - -#[cargo_test] -fn rust_version_bad_pre_release() { - project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - rust-version = "1.43-beta.1" - [[bin]] - name = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build() - .cargo("build") - .with_status(101) - .with_stderr( - "error: failed to parse manifest at `[..]`\n\n\ - Caused by:\n `rust-version` must be a value like \"1.32\"", - ) - .run(); -} - -#[cargo_test] -fn rust_version_bad_nonsense() { - project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - rust-version = "foodaddle" - [[bin]] - name = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build() - .cargo("build") - .with_status(101) - .with_stderr( - "error: failed to parse manifest at `[..]`\n\n\ - Caused by:\n `rust-version` must be a value like \"1.32\"", - ) - .run(); -} - -#[cargo_test] -fn rust_version_too_high() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - rust-version = "1.9876.0" - [[bin]] - name = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "error: package `foo v0.0.1 ([..])` cannot be built because it requires \ - rustc 1.9876.0 or newer, while the currently active rustc version is [..]", - ) - .run(); - p.cargo("build --ignore-rust-version").run(); -} - -#[cargo_test] -fn rust_version_dependency_fails() { - Package::new("bar", "0.0.1") - .rust_version("1.2345.0") - .file("src/lib.rs", "fn other_stuff() {}") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - [dependencies] - bar = "0.0.1" - "#, - ) - .file("src/main.rs", "fn main(){}") - .build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - " Updating `[..]` index\n \ - Downloading crates ...\n \ - Downloaded bar v0.0.1 (registry `[..]`)\n\ - error: package `bar v0.0.1` cannot be built because it requires \ - rustc 1.2345.0 or newer, while the currently active rustc version is [..]", - ) - .run(); - p.cargo("build --ignore-rust-version").run(); -} - -#[cargo_test] -fn rust_version_older_than_edition() { - project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - rust-version = "1.1" - edition = "2018" - [[bin]] - name = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build() - .cargo("build") - .with_status(101) - .with_stderr_contains(" rust-version 1.1 is older than first version (1.31.0) required by the specified edition (2018)", - ) - .run(); -} diff --git 
a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustc.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustc.rs deleted file mode 100644 index 571e53927..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustc.rs +++ /dev/null @@ -1,783 +0,0 @@ -//! Tests for the `cargo rustc` command. - -use cargo_test_support::{basic_bin_manifest, basic_lib_manifest, basic_manifest, project}; - -const CARGO_RUSTC_ERROR: &str = - "[ERROR] extra arguments to `rustc` can only be passed to one target, consider filtering -the package by passing, e.g., `--lib` or `--bin NAME` to specify a single target"; - -#[cargo_test] -fn build_lib_for_foo() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustc --lib -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn lib() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustc --lib -v -- -C debug-assertions=off") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 \ - -C debug-assertions=off \ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_main_and_allow_unstable_options() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustc -v --bin foo -- -C debug-assertions") - .with_stderr(format!( - "\ -[COMPILING] {name} v{version} ([CWD]) -[RUNNING] `rustc --crate-name {name} src/lib.rs [..]--crate-type lib \ - --emit=[..]link[..]-C debuginfo=2 \ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps` -[RUNNING] `rustc --crate-name {name} src/main.rs [..]--crate-type bin \ - --emit=[..]link[..]-C debuginfo=2 \ - -C debug-assertions \ - -C metadata=[..] \ - --out-dir [..] \ - -L dependency=[CWD]/target/debug/deps \ - --extern {name}=[CWD]/target/debug/deps/lib{name}-[..].rlib` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - name = "foo", - version = "0.0.1" - )) - .run(); -} - -#[cargo_test] -fn fails_when_trying_to_build_main_and_lib_with_args() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustc -v -- -C debug-assertions") - .with_status(101) - .with_stderr(CARGO_RUSTC_ERROR) - .run(); -} - -#[cargo_test] -fn build_with_args_to_one_of_multiple_binaries() { - let p = project() - .file("src/bin/foo.rs", "fn main() {}") - .file("src/bin/bar.rs", "fn main() {}") - .file("src/bin/baz.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustc -v --bin bar -- -C debug-assertions") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]\ - -C debuginfo=2 -C metadata=[..] \ - --out-dir [..]` -[RUNNING] `rustc --crate-name bar src/bin/bar.rs [..]--crate-type bin --emit=[..]link[..]\ - -C debuginfo=2 -C debug-assertions [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn fails_with_args_to_all_binaries() { - let p = project() - .file("src/bin/foo.rs", "fn main() {}") - .file("src/bin/bar.rs", "fn main() {}") - .file("src/bin/baz.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustc -v -- -C debug-assertions") - .with_status(101) - .with_stderr(CARGO_RUSTC_ERROR) - .run(); -} - -#[cargo_test] -fn fails_with_crate_type_and_without_unstable_options() { - let p = project().file("src/lib.rs", r#" "#).build(); - - p.cargo("rustc --crate-type lib") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "[ERROR] the `crate-type` flag is unstable, pass `-Z unstable-options` to enable it -See https://github.com/rust-lang/cargo/issues/10083 for more information about the `crate-type` flag.", - ) - .run(); -} - -#[cargo_test] -fn fails_with_crate_type_to_multi_binaries() { - let p = project() - .file("src/bin/foo.rs", "fn main() {}") - .file("src/bin/bar.rs", "fn main() {}") - .file("src/bin/baz.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustc --crate-type lib -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "[ERROR] crate types to rustc can only be passed to one target, consider filtering -the package by passing, e.g., `--lib` or `--example` to specify a single target", - ) - .run(); -} - -#[cargo_test] -fn fails_with_crate_type_to_multi_examples() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex1" - crate-type = ["rlib"] - [[example]] - name = "ex2" - crate-type = ["rlib"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/ex1.rs", "") - .file("examples/ex2.rs", "") - .build(); - - p.cargo("rustc -v --example ex1 --example ex2 --crate-type lib,cdylib -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "[ERROR] crate types to rustc can only be passed to one target, consider filtering -the package by passing, e.g., `--lib` or `--example` to specify a single target", - ) - .run(); -} - -#[cargo_test] -fn fails_with_crate_type_to_binary() { - let p = project().file("src/bin/foo.rs", "fn main() {}").build(); - - p.cargo("rustc --crate-type lib -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "[ERROR] crate types can only be specified for libraries and example libraries. -Binaries, tests, and benchmarks are always the `bin` crate type", - ) - .run(); -} - -#[cargo_test] -fn build_with_crate_type_for_foo() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustc -v --lib --crate-type lib -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_with_crate_types_for_foo() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustc -v --lib --crate-type lib,cdylib -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib,cdylib [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn build_with_crate_type_to_example() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex" - crate-type = ["rlib"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/ex.rs", "") - .build(); - - p.cargo("rustc -v --example ex --crate-type cdylib -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..] -[RUNNING] `rustc --crate-name ex examples/ex.rs [..]--crate-type cdylib [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_with_crate_types_to_example() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex" - crate-type = ["rlib"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/ex.rs", "") - .build(); - - p.cargo("rustc -v --example ex --crate-type lib,cdylib -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..] -[RUNNING] `rustc --crate-name ex examples/ex.rs [..]--crate-type lib,cdylib [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_with_crate_types_to_one_of_multi_examples() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[example]] - name = "ex1" - crate-type = ["rlib"] - [[example]] - name = "ex2" - crate-type = ["rlib"] - "#, - ) - .file("src/lib.rs", "") - .file("examples/ex1.rs", "") - .file("examples/ex2.rs", "") - .build(); - - p.cargo("rustc -v --example ex1 --crate-type lib,cdylib -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib [..] -[RUNNING] `rustc --crate-name ex1 examples/ex1.rs [..]--crate-type lib,cdylib [..] -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_with_args_to_one_of_multiple_tests() { - let p = project() - .file("tests/foo.rs", r#" "#) - .file("tests/bar.rs", r#" "#) - .file("tests/baz.rs", r#" "#) - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustc -v --test bar -- -C debug-assertions") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..]--crate-type lib --emit=[..]link[..]\ - -C debuginfo=2 -C metadata=[..] \ - --out-dir [..]` -[RUNNING] `rustc --crate-name bar tests/bar.rs [..]--emit=[..]link[..]-C debuginfo=2 \ - -C debug-assertions [..]--test[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_foo_with_bar_dependency() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }") - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn baz() {}") - .build(); - - foo.cargo("rustc -v -- -C debug-assertions") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 ([..]) -[RUNNING] `[..] -C debuginfo=2 [..]` -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `[..] 
-C debuginfo=2 -C debug-assertions [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn build_only_bar_dependency() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }") - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("src/lib.rs", "pub fn baz() {}") - .build(); - - foo.cargo("rustc -v -p bar -- -C debug-assertions") - .with_stderr( - "\ -[COMPILING] bar v0.1.0 ([..]) -[RUNNING] `rustc --crate-name bar [..]--crate-type lib [..] -C debug-assertions [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn targets_selected_default() { - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("rustc -v") - // bin - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ - --emit=[..]link[..]", - ) - // bench - .with_stderr_does_not_contain( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link \ - -C opt-level=3 --test [..]", - ) - // unit test - .with_stderr_does_not_contain( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link \ - -C debuginfo=2 --test [..]", - ) - .run(); -} - -#[cargo_test] -fn targets_selected_all() { - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("rustc -v --all-targets") - // bin - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--crate-type bin \ - --emit=[..]link[..]", - ) - // unit test - .with_stderr_contains( - "[RUNNING] `rustc --crate-name foo src/main.rs [..]--emit=[..]link[..]\ - -C debuginfo=2 --test [..]", - ) - .run(); -} - -#[cargo_test] -fn fail_with_multiple_packages() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - - [dependencies.baz] - path = "../baz" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file( - "src/main.rs", - r#" - fn main() { - if cfg!(flag = "1") { println!("Yeah from bar!"); } - } - "#, - ) - .build(); - - let _baz = project() - .at("baz") - .file("Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file( - "src/main.rs", - r#" - fn main() { - if cfg!(flag = "1") { println!("Yeah from baz!"); } - } - "#, - ) - .build(); - - foo.cargo("rustc -v -p bar -p baz") - .with_status(1) - .with_stderr_contains( - "\ -error: The argument '--package ' was provided more than once, \ - but cannot be used multiple times -", - ) - .run(); -} - -#[cargo_test] -fn fail_with_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") - .build(); - - p.cargo("rustc -p '*z'") - .with_status(101) - .with_stderr("[ERROR] Glob patterns on package selection are not supported.") - .run(); -} - -#[cargo_test] -fn rustc_with_other_profile() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies] - a = { path = "a" } - "#, - ) - .file( - "src/main.rs", - r#" - #[cfg(test)] extern crate a; - - #[test] - fn foo() {} - "#, 
- ) - .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("rustc --profile test").run(); -} - -#[cargo_test] -fn rustc_fingerprint() { - // Verify that the fingerprint includes the rustc args. - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", "") - .build(); - - p.cargo("rustc -v -- -C debug-assertions") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc [..]-C debug-assertions [..] -[FINISHED] [..] -", - ) - .run(); - - p.cargo("rustc -v -- -C debug-assertions") - .with_stderr( - "\ -[FRESH] foo [..] -[FINISHED] [..] -", - ) - .run(); - - p.cargo("rustc -v") - .with_stderr_does_not_contain("-C debug-assertions") - .with_stderr( - "\ -[COMPILING] foo [..] -[RUNNING] `rustc [..] -[FINISHED] [..] -", - ) - .run(); - - p.cargo("rustc -v") - .with_stderr( - "\ -[FRESH] foo [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustc_test_with_implicit_bin() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/main.rs", - r#" - #[cfg(foo)] - fn f() { compile_fail!("Foo shouldn't be set."); } - fn main() {} - "#, - ) - .file( - "tests/test1.rs", - r#" - #[cfg(not(foo))] - fn f() { compile_fail!("Foo should be set."); } - "#, - ) - .build(); - - p.cargo("rustc --test test1 -v -- --cfg foo") - .with_stderr_contains( - "\ -[RUNNING] `rustc --crate-name test1 tests/test1.rs [..] --cfg foo [..] -", - ) - .with_stderr_contains( - "\ -[RUNNING] `rustc --crate-name foo src/main.rs [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustc_with_print_cfg_single_target() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", r#"fn main() {} "#) - .build(); - - p.cargo("rustc -Z unstable-options --target x86_64-pc-windows-msvc --print cfg") - .masquerade_as_nightly_cargo() - .with_stdout_contains("debug_assertions") - .with_stdout_contains("target_arch=\"x86_64\"") - .with_stdout_contains("target_endian=\"little\"") - .with_stdout_contains("target_env=\"msvc\"") - .with_stdout_contains("target_family=\"windows\"") - .with_stdout_contains("target_os=\"windows\"") - .with_stdout_contains("target_pointer_width=\"64\"") - .with_stdout_contains("target_vendor=\"pc\"") - .with_stdout_contains("windows") - .run(); -} - -#[cargo_test] -fn rustc_with_print_cfg_multiple_targets() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", r#"fn main() {} "#) - .build(); - - p.cargo("rustc -Z unstable-options -Z multitarget --target x86_64-pc-windows-msvc --target i686-unknown-linux-gnu --print cfg") - .masquerade_as_nightly_cargo() - .with_stdout_contains("debug_assertions") - .with_stdout_contains("target_arch=\"x86_64\"") - .with_stdout_contains("target_endian=\"little\"") - .with_stdout_contains("target_env=\"msvc\"") - .with_stdout_contains("target_family=\"windows\"") - .with_stdout_contains("target_os=\"windows\"") - .with_stdout_contains("target_pointer_width=\"64\"") - .with_stdout_contains("target_vendor=\"pc\"") - .with_stdout_contains("windows") - .with_stdout_contains("target_env=\"gnu\"") - .with_stdout_contains("target_family=\"unix\"") - .with_stdout_contains("target_pointer_width=\"32\"") - .with_stdout_contains("target_vendor=\"unknown\"") - .with_stdout_contains("target_os=\"linux\"") - .with_stdout_contains("unix") - .run(); -} - -#[cargo_test] -fn rustc_with_print_cfg_rustflags_env_var() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", 
r#"fn main() {} "#) - .build(); - - p.cargo("rustc -Z unstable-options --target x86_64-pc-windows-msvc --print cfg") - .masquerade_as_nightly_cargo() - .env("RUSTFLAGS", "-C target-feature=+crt-static") - .with_stdout_contains("debug_assertions") - .with_stdout_contains("target_arch=\"x86_64\"") - .with_stdout_contains("target_endian=\"little\"") - .with_stdout_contains("target_env=\"msvc\"") - .with_stdout_contains("target_family=\"windows\"") - .with_stdout_contains("target_feature=\"crt-static\"") - .with_stdout_contains("target_os=\"windows\"") - .with_stdout_contains("target_pointer_width=\"64\"") - .with_stdout_contains("target_vendor=\"pc\"") - .with_stdout_contains("windows") - .run(); -} - -#[cargo_test] -fn rustc_with_print_cfg_config_toml() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - ".cargo/config.toml", - r#" -[target.x86_64-pc-windows-msvc] -rustflags = ["-C", "target-feature=+crt-static"] -"#, - ) - .file("src/main.rs", r#"fn main() {} "#) - .build(); - - p.cargo("rustc -Z unstable-options --target x86_64-pc-windows-msvc --print cfg") - .masquerade_as_nightly_cargo() - .env("RUSTFLAGS", "-C target-feature=+crt-static") - .with_stdout_contains("debug_assertions") - .with_stdout_contains("target_arch=\"x86_64\"") - .with_stdout_contains("target_endian=\"little\"") - .with_stdout_contains("target_env=\"msvc\"") - .with_stdout_contains("target_family=\"windows\"") - .with_stdout_contains("target_feature=\"crt-static\"") - .with_stdout_contains("target_os=\"windows\"") - .with_stdout_contains("target_pointer_width=\"64\"") - .with_stdout_contains("target_vendor=\"pc\"") - .with_stdout_contains("windows") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustc_info_cache.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustc_info_cache.rs deleted file mode 100644 index 9747fa357..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustc_info_cache.rs +++ /dev/null @@ -1,186 +0,0 @@ -//! Tests for the cache file for the rustc version info. - -use cargo_test_support::{basic_bin_manifest, paths::CargoPathExt}; -use cargo_test_support::{basic_manifest, project}; -use std::env; - -const MISS: &str = "[..] 
rustc info cache miss[..]"; -const HIT: &str = "[..]rustc info cache hit[..]"; -const UPDATE: &str = "[..]updated rustc info cache[..]"; - -#[cargo_test] -fn rustc_info_cache() { - let p = project() - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .with_stderr_contains("[..]failed to read rustc info cache[..]") - .with_stderr_contains(MISS) - .with_stderr_does_not_contain(HIT) - .with_stderr_contains(UPDATE) - .run(); - - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .with_stderr_contains("[..]reusing existing rustc info cache[..]") - .with_stderr_contains(HIT) - .with_stderr_does_not_contain(MISS) - .with_stderr_does_not_contain(UPDATE) - .run(); - - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .env("CARGO_CACHE_RUSTC_INFO", "0") - .with_stderr_contains("[..]rustc info cache disabled[..]") - .with_stderr_does_not_contain(UPDATE) - .run(); - - let other_rustc = { - let p = project() - .at("compiler") - .file("Cargo.toml", &basic_manifest("compiler", "0.1.0")) - .file( - "src/main.rs", - r#" - use std::process::Command; - use std::env; - - fn main() { - let mut cmd = Command::new("rustc"); - for arg in env::args_os().skip(1) { - cmd.arg(arg); - } - std::process::exit(cmd.status().unwrap().code().unwrap()); - } - "#, - ) - .build(); - p.cargo("build").run(); - - p.root() - .join("target/debug/compiler") - .with_extension(env::consts::EXE_EXTENSION) - }; - - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .env("RUSTC", other_rustc.display().to_string()) - .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") - .with_stderr_contains(MISS) - .with_stderr_does_not_contain(HIT) - .with_stderr_contains(UPDATE) - .run(); - - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .env("RUSTC", other_rustc.display().to_string()) - .with_stderr_contains("[..]reusing existing rustc info cache[..]") - .with_stderr_contains(HIT) - .with_stderr_does_not_contain(MISS) - .with_stderr_does_not_contain(UPDATE) - .run(); - - other_rustc.move_into_the_future(); - - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .env("RUSTC", other_rustc.display().to_string()) - .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") - .with_stderr_contains(MISS) - .with_stderr_does_not_contain(HIT) - .with_stderr_contains(UPDATE) - .run(); - - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .env("RUSTC", other_rustc.display().to_string()) - .with_stderr_contains("[..]reusing existing rustc info cache[..]") - .with_stderr_contains(HIT) - .with_stderr_does_not_contain(MISS) - .with_stderr_does_not_contain(UPDATE) - .run(); -} - -#[cargo_test] -fn rustc_info_cache_with_wrappers() { - let wrapper_project = project() - .at("wrapper") - .file("Cargo.toml", &basic_bin_manifest("wrapper")) - .file("src/main.rs", r#"fn main() { }"#) - .build(); - let wrapper = wrapper_project.bin("wrapper"); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "test" - version = "0.0.0" - authors = [] - [workspace] - "#, - ) - .file("src/main.rs", r#"fn main() { println!("hello"); }"#) - .build(); - - for &wrapper_env in ["RUSTC_WRAPPER", "RUSTC_WORKSPACE_WRAPPER"].iter() { - p.cargo("clean").with_status(0).run(); - wrapper_project.change_file( - "src/main.rs", - r#" - fn main() { - let mut args = std::env::args_os(); - let _me = args.next().unwrap(); - let rustc = 
args.next().unwrap(); - let status = std::process::Command::new(rustc).args(args).status().unwrap(); - std::process::exit(if status.success() { 0 } else { 1 }) - } - "#, - ); - wrapper_project.cargo("build").with_status(0).run(); - - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .env(wrapper_env, &wrapper) - .with_stderr_contains("[..]failed to read rustc info cache[..]") - .with_stderr_contains(MISS) - .with_stderr_contains(UPDATE) - .with_stderr_does_not_contain(HIT) - .with_status(0) - .run(); - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .env(wrapper_env, &wrapper) - .with_stderr_contains("[..]reusing existing rustc info cache[..]") - .with_stderr_contains(HIT) - .with_stderr_does_not_contain(UPDATE) - .with_stderr_does_not_contain(MISS) - .with_status(0) - .run(); - - wrapper_project.change_file("src/main.rs", r#"fn main() { panic!() }"#); - wrapper_project.cargo("build").with_status(0).run(); - - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .env(wrapper_env, &wrapper) - .with_stderr_contains("[..]different compiler, creating new rustc info cache[..]") - .with_stderr_contains(MISS) - .with_stderr_contains(UPDATE) - .with_stderr_does_not_contain(HIT) - .with_status(101) - .run(); - p.cargo("build") - .env("CARGO_LOG", "cargo::util::rustc=debug") - .env(wrapper_env, &wrapper) - .with_stderr_contains("[..]reusing existing rustc info cache[..]") - .with_stderr_contains(HIT) - .with_stderr_does_not_contain(UPDATE) - .with_stderr_does_not_contain(MISS) - .with_status(101) - .run(); - } -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustdoc.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustdoc.rs deleted file mode 100644 index 5650f3e0a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustdoc.rs +++ /dev/null @@ -1,252 +0,0 @@ -//! Tests for the `cargo rustdoc` command. - -use cargo_test_support::{basic_manifest, cross_compile, project}; - -#[cargo_test] -fn rustdoc_simple() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("rustdoc -v") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\ - -o [CWD]/target/doc \ - [..] \ - -L dependency=[CWD]/target/debug/deps [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustdoc_args() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("rustdoc -v -- --cfg=foo") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([CWD]) -[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\ - -o [CWD]/target/doc \ - [..] \ - --cfg=foo \ - -C metadata=[..] \ - -L dependency=[CWD]/target/debug/deps [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustdoc_binary_args_passed() { - let p = project().file("src/main.rs", "").build(); - - p.cargo("rustdoc -v") - .arg("--") - .arg("--markdown-no-toc") - .with_stderr_contains("[RUNNING] `rustdoc [..] 
--markdown-no-toc[..]`") - .run(); -} - -#[cargo_test] -fn rustdoc_foo_with_bar_dependency() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file("src/lib.rs", "extern crate bar; pub fn foo() {}") - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("src/lib.rs", "pub fn baz() {}") - .build(); - - foo.cargo("rustdoc -v -- --cfg=foo") - .with_stderr( - "\ -[CHECKING] bar v0.0.1 ([..]) -[RUNNING] `rustc [..]bar/src/lib.rs [..]` -[DOCUMENTING] foo v0.0.1 ([CWD]) -[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\ - -o [CWD]/target/doc \ - [..] \ - --cfg=foo \ - -C metadata=[..] \ - -L dependency=[CWD]/target/debug/deps \ - --extern [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustdoc_only_bar_dependency() { - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file("src/main.rs", "extern crate bar; fn main() { bar::baz() }") - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("src/lib.rs", "pub fn baz() {}") - .build(); - - foo.cargo("rustdoc -v -p bar -- --cfg=foo") - .with_stderr( - "\ -[DOCUMENTING] bar v0.0.1 ([..]) -[RUNNING] `rustdoc [..]--crate-name bar [..]bar/src/lib.rs [..]\ - -o [CWD]/target/doc \ - [..] \ - --cfg=foo \ - -C metadata=[..] \ - -L dependency=[CWD]/target/debug/deps [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustdoc_same_name_documents_lib() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file("src/lib.rs", r#" "#) - .build(); - - p.cargo("rustdoc -v -- --cfg=foo") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([..]) -[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\ - -o [CWD]/target/doc \ - [..] \ - --cfg=foo \ - -C metadata=[..] \ - -L dependency=[CWD]/target/debug/deps [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn features() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [features] - quux = [] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("rustdoc --verbose --features quux") - .with_stderr_contains("[..]feature=[..]quux[..]") - .run(); -} - -#[cargo_test] -fn proc_macro_crate_type() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - proc-macro = true - - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("rustdoc --verbose") - .with_stderr_contains( - "\ -[RUNNING] `rustdoc --crate-type proc-macro [..]` -", - ) - .run(); -} - -#[cargo_test] -fn rustdoc_target() { - if cross_compile::disabled() { - return; - } - - let p = project().file("src/lib.rs", "").build(); - - p.cargo("rustdoc --verbose --target") - .arg(cross_compile::alternate()) - .with_stderr(format!( - "\ -[DOCUMENTING] foo v0.0.1 ([..]) -[RUNNING] `rustdoc [..]--crate-name foo src/lib.rs [..]\ - --target {target} \ - -o [CWD]/target/{target}/doc \ - [..] 
\ - -L dependency=[CWD]/target/{target}/debug/deps \ - -L dependency=[CWD]/target/debug/deps[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]", - target = cross_compile::alternate() - )) - .run(); -} - -#[cargo_test] -fn fail_with_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() { break_the_build(); }") - .build(); - - p.cargo("rustdoc -p '*z'") - .with_status(101) - .with_stderr("[ERROR] Glob patterns on package selection are not supported.") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustdoc_extern_html.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustdoc_extern_html.rs deleted file mode 100644 index 4d3d575f9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustdoc_extern_html.rs +++ /dev/null @@ -1,372 +0,0 @@ -//! Tests for the -Zrustdoc-map feature. - -use cargo_test_support::registry::{self, Package}; -use cargo_test_support::{is_nightly, paths, project, Project}; - -fn basic_project() -> Project { - Package::new("bar", "1.0.0") - .file("src/lib.rs", "pub struct Straw;") - .publish(); - - project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - bar = "1.0" - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn myfun() -> Option { - None - } - "#, - ) - .build() -} - -#[cargo_test] -fn ignores_on_stable() { - // Requires -Zrustdoc-map to use. - let p = basic_project(); - p.cargo("doc -v --no-deps") - .with_stderr_does_not_contain("[..]--extern-html-root-url[..]") - .run(); -} - -#[cargo_test] -fn simple() { - // Basic test that it works with crates.io. - if !is_nightly() { - // --extern-html-root-url is unstable - return; - } - let p = basic_project(); - p.cargo("doc -v --no-deps -Zrustdoc-map") - .masquerade_as_nightly_cargo() - .with_stderr_contains( - "[RUNNING] `rustdoc [..]--crate-name foo [..]bar=https://docs.rs/bar/1.0.0/[..]", - ) - .run(); - let myfun = p.read_file("target/doc/foo/fn.myfun.html"); - assert!(myfun.contains(r#"href="https://docs.rs/bar/1.0.0/bar/struct.Straw.html""#)); -} - -#[cargo_test] -// Broken, temporarily disable until https://github.com/rust-lang/rust/pull/82776 is resolved. -#[ignore] -fn std_docs() { - // Mapping std docs somewhere else. - if !is_nightly() { - // --extern-html-root-url is unstable - return; - } - // For local developers, skip this test if docs aren't installed. 
- let docs = std::path::Path::new(&paths::sysroot()).join("share/doc/rust/html"); - if !docs.exists() { - if cargo_util::is_ci() { - panic!("std docs are not installed, check that the rust-docs component is installed"); - } else { - eprintln!( - "documentation not found at {}, \ - skipping test (run `rustdoc component add rust-docs` to install", - docs.display() - ); - return; - } - } - let p = basic_project(); - p.change_file( - ".cargo/config", - r#" - [doc.extern-map] - std = "local" - "#, - ); - p.cargo("doc -v --no-deps -Zrustdoc-map") - .masquerade_as_nightly_cargo() - .with_stderr_contains("[RUNNING] `rustdoc [..]--crate-name foo [..]std=file://[..]") - .run(); - let myfun = p.read_file("target/doc/foo/fn.myfun.html"); - assert!(myfun.contains(r#"share/doc/rust/html/core/option/enum.Option.html""#)); - - p.change_file( - ".cargo/config", - r#" - [doc.extern-map] - std = "https://example.com/rust/" - "#, - ); - p.cargo("doc -v --no-deps -Zrustdoc-map") - .masquerade_as_nightly_cargo() - .with_stderr_contains( - "[RUNNING] `rustdoc [..]--crate-name foo [..]std=https://example.com/rust/[..]", - ) - .run(); - let myfun = p.read_file("target/doc/foo/fn.myfun.html"); - assert!(myfun.contains(r#"href="https://example.com/rust/core/option/enum.Option.html""#)); -} - -#[cargo_test] -fn renamed_dep() { - // Handles renamed dependencies. - if !is_nightly() { - // --extern-html-root-url is unstable - return; - } - Package::new("bar", "1.0.0") - .file("src/lib.rs", "pub struct Straw;") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - groovy = { version = "1.0", package = "bar" } - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn myfun() -> Option { - None - } - "#, - ) - .build(); - p.cargo("doc -v --no-deps -Zrustdoc-map") - .masquerade_as_nightly_cargo() - .with_stderr_contains( - "[RUNNING] `rustdoc [..]--crate-name foo [..]bar=https://docs.rs/bar/1.0.0/[..]", - ) - .run(); - let myfun = p.read_file("target/doc/foo/fn.myfun.html"); - assert!(myfun.contains(r#"href="https://docs.rs/bar/1.0.0/bar/struct.Straw.html""#)); -} - -#[cargo_test] -fn lib_name() { - // Handles lib name != package name. - if !is_nightly() { - // --extern-html-root-url is unstable - return; - } - Package::new("bar", "1.0.0") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "1.0.0" - - [lib] - name = "rumpelstiltskin" - "#, - ) - .file("src/lib.rs", "pub struct Straw;") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn myfun() -> Option { - None - } - "#, - ) - .build(); - p.cargo("doc -v --no-deps -Zrustdoc-map") - .masquerade_as_nightly_cargo() - .with_stderr_contains( - "[RUNNING] `rustdoc [..]--crate-name foo [..]rumpelstiltskin=https://docs.rs/bar/1.0.0/[..]", - ) - .run(); - let myfun = p.read_file("target/doc/foo/fn.myfun.html"); - assert!(myfun.contains(r#"href="https://docs.rs/bar/1.0.0/rumpelstiltskin/struct.Straw.html""#)); -} - -#[cargo_test] -fn alt_registry() { - // Supports other registry names. 
- if !is_nightly() { - // --extern-html-root-url is unstable - return; - } - registry::alt_init(); - Package::new("bar", "1.0.0") - .alternative(true) - .file( - "src/lib.rs", - r#" - extern crate baz; - pub struct Queen; - pub use baz::King; - "#, - ) - .registry_dep("baz", "1.0") - .publish(); - Package::new("baz", "1.0.0") - .alternative(true) - .file("src/lib.rs", "pub struct King;") - .publish(); - Package::new("grimm", "1.0.0") - .file("src/lib.rs", "pub struct Gold;") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - bar = { version = "1.0", registry="alternative" } - grimm = "1.0" - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn queen() -> bar::Queen { bar::Queen } - pub fn king() -> bar::King { bar::King } - pub fn gold() -> grimm::Gold { grimm::Gold } - "#, - ) - .file( - ".cargo/config", - r#" - [doc.extern-map.registries] - alternative = "https://example.com/{pkg_name}/{version}/" - crates-io = "https://docs.rs/" - "#, - ) - .build(); - p.cargo("doc -v --no-deps -Zrustdoc-map") - .masquerade_as_nightly_cargo() - .with_stderr_contains( - "[RUNNING] `rustdoc [..]--crate-name foo \ - [..]bar=https://example.com/bar/1.0.0/[..]grimm=https://docs.rs/grimm/1.0.0/[..]", - ) - .run(); - let queen = p.read_file("target/doc/foo/fn.queen.html"); - assert!(queen.contains(r#"href="https://example.com/bar/1.0.0/bar/struct.Queen.html""#)); - // The king example fails to link. Rustdoc seems to want the origin crate - // name (baz) for re-exports. There are many issues in the issue tracker - // for rustdoc re-exports, so I'm not sure, but I think this is maybe a - // rustdoc issue. Alternatively, Cargo could provide mappings for all - // transitive dependencies to fix this. - let king = p.read_file("target/doc/foo/fn.king.html"); - assert!(king.contains(r#"-> King"#)); - - let gold = p.read_file("target/doc/foo/fn.gold.html"); - assert!(gold.contains(r#"href="https://docs.rs/grimm/1.0.0/grimm/struct.Gold.html""#)); -} - -#[cargo_test] -fn multiple_versions() { - // What happens when there are multiple versions. - // NOTE: This is currently broken behavior. Rustdoc does not provide a way - // to match renamed dependencies. - if !is_nightly() { - // --extern-html-root-url is unstable - return; - } - Package::new("bar", "1.0.0") - .file("src/lib.rs", "pub struct Spin;") - .publish(); - Package::new("bar", "2.0.0") - .file("src/lib.rs", "pub struct Straw;") - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - bar = "1.0" - bar2 = {version="2.0", package="bar"} - "#, - ) - .file( - "src/lib.rs", - " - pub fn fn1() -> bar::Spin {bar::Spin} - pub fn fn2() -> bar2::Straw {bar2::Straw} - ", - ) - .build(); - p.cargo("doc -v --no-deps -Zrustdoc-map") - .masquerade_as_nightly_cargo() - .with_stderr_contains( - "[RUNNING] `rustdoc [..]--crate-name foo \ - [..]bar=https://docs.rs/bar/1.0.0/[..]bar=https://docs.rs/bar/2.0.0/[..]", - ) - .run(); - let fn1 = p.read_file("target/doc/foo/fn.fn1.html"); - // This should be 1.0.0, rustdoc seems to use the last entry when there - // are duplicates. - assert!(fn1.contains(r#"href="https://docs.rs/bar/2.0.0/bar/struct.Spin.html""#)); - let fn2 = p.read_file("target/doc/foo/fn.fn2.html"); - assert!(fn2.contains(r#"href="https://docs.rs/bar/2.0.0/bar/struct.Straw.html""#)); -} - -#[cargo_test] -fn rebuilds_when_changing() { - // Make sure it rebuilds if the map changes. 
- if !is_nightly() { - // --extern-html-root-url is unstable - return; - } - let p = basic_project(); - p.cargo("doc -v --no-deps -Zrustdoc-map") - .masquerade_as_nightly_cargo() - .with_stderr_contains("[..]--extern-html-root-url[..]") - .run(); - - // This also tests that the map for docs.rs can be overridden. - p.change_file( - ".cargo/config", - r#" - [doc.extern-map.registries] - crates-io = "https://example.com/" - "#, - ); - p.cargo("doc -v --no-deps -Zrustdoc-map") - .masquerade_as_nightly_cargo() - .with_stderr_contains( - "[RUNNING] `rustdoc [..]--extern-html-root-url [..]bar=https://example.com/bar/1.0.0/[..]", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustdocflags.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustdocflags.rs deleted file mode 100644 index b17f83c2f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustdocflags.rs +++ /dev/null @@ -1,123 +0,0 @@ -//! Tests for setting custom rustdoc flags. - -use cargo_test_support::project; - -#[cargo_test] -fn parses_env() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("doc -v") - .env("RUSTDOCFLAGS", "--cfg=foo") - .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg=foo[..]`") - .run(); -} - -#[cargo_test] -fn parses_config() { - let p = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - rustdocflags = ["--cfg", "foo"] - "#, - ) - .build(); - - p.cargo("doc -v") - .with_stderr_contains("[RUNNING] `rustdoc [..] --cfg foo[..]`") - .run(); -} - -#[cargo_test] -fn bad_flags() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("doc") - .env("RUSTDOCFLAGS", "--bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -#[cargo_test] -fn rerun() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("doc").env("RUSTDOCFLAGS", "--cfg=foo").run(); - p.cargo("doc") - .env("RUSTDOCFLAGS", "--cfg=foo") - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); - p.cargo("doc") - .env("RUSTDOCFLAGS", "--cfg=bar") - .with_stderr( - "\ -[DOCUMENTING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn rustdocflags_passed_to_rustdoc_through_cargo_test() { - let p = project() - .file( - "src/lib.rs", - r#" - //! ``` - //! assert!(cfg!(do_not_choke)); - //! ``` - "#, - ) - .build(); - - p.cargo("test --doc") - .env("RUSTDOCFLAGS", "--cfg do_not_choke") - .run(); -} - -#[cargo_test] -fn rustdocflags_passed_to_rustdoc_through_cargo_test_only_once() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("test --doc") - .env("RUSTDOCFLAGS", "--markdown-no-toc") - .run(); -} - -#[cargo_test] -fn rustdocflags_misspelled() { - let p = project().file("src/main.rs", "fn main() { }").build(); - - p.cargo("doc") - .env("RUSTDOC_FLAGS", "foo") - .with_stderr_contains("[WARNING] Cargo does not read `RUSTDOC_FLAGS` environment variable. Did you mean `RUSTDOCFLAGS`?") - .run(); -} - -#[cargo_test] -fn whitespace() { - // Checks behavior of different whitespace characters. 
- let p = project().file("src/lib.rs", "").build(); - - // "too many operands" - p.cargo("doc") - .env("RUSTDOCFLAGS", "--crate-version this has spaces") - .with_stderr_contains("[ERROR] could not document `foo`") - .with_status(101) - .run(); - - const SPACED_VERSION: &str = "a\nb\tc\u{00a0}d"; - p.cargo("doc") - .env( - "RUSTDOCFLAGS", - format!("--crate-version {}", SPACED_VERSION), - ) - .run(); - - let contents = p.read_file("target/doc/foo/index.html"); - assert!(contents.contains(SPACED_VERSION)); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustflags.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustflags.rs deleted file mode 100644 index 4f825b4f3..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/rustflags.rs +++ /dev/null @@ -1,1437 +0,0 @@ -//! Tests for setting custom rustc flags. - -use cargo_test_support::registry::Package; -use cargo_test_support::{ - basic_lib_manifest, basic_manifest, paths, project, project_in_home, rustc_host, -}; -use std::fs; - -#[cargo_test] -fn env_rustflags_normal_source() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .file("tests/c.rs", "#[test] fn f() { }") - .file( - "benches/d.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run1(_ben: &mut test::Bencher) { } - "#, - ) - .build(); - - // Use RUSTFLAGS to pass an argument that will generate an error - p.cargo("build --lib") - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --bin=a") - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --example=b") - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("test") - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("bench") - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -#[cargo_test] -fn env_rustflags_build_script() { - // RUSTFLAGS should be passed to rustc for build scripts - // when --target is not specified. - // In this test if --cfg foo is passed the build will fail. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { } - #[cfg(not(foo))] - fn main() { } - "#, - ) - .build(); - - p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); -} - -#[cargo_test] -fn env_rustflags_build_script_dep() { - // RUSTFLAGS should be passed to rustc for build scripts - // when --target is not specified. - // In this test if --cfg foo is not passed the build will fail. - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - build = "build.rs" - - [build-dependencies.bar] - path = "../bar" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file( - "src/lib.rs", - r#" - fn bar() { } - #[cfg(not(foo))] - fn bar() { } - "#, - ) - .build(); - - foo.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); -} - -#[cargo_test] -fn env_rustflags_plugin() { - // RUSTFLAGS should be passed to rustc for plugins - // when --target is not specified. 
- // In this test if --cfg foo is not passed the build will fail. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [lib] - name = "foo" - plugin = true - "#, - ) - .file( - "src/lib.rs", - r#" - fn main() { } - #[cfg(not(foo))] - fn main() { } - "#, - ) - .build(); - - p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); -} - -#[cargo_test] -fn env_rustflags_plugin_dep() { - // RUSTFLAGS should be passed to rustc for plugins - // when --target is not specified. - // In this test if --cfg foo is not passed the build will fail. - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [lib] - name = "foo" - plugin = true - - [dependencies.bar] - path = "../bar" - "#, - ) - .file("src/lib.rs", "fn foo() {}") - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_lib_manifest("bar")) - .file( - "src/lib.rs", - r#" - fn bar() { } - #[cfg(not(foo))] - fn bar() { } - "#, - ) - .build(); - - foo.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); -} - -#[cargo_test] -fn env_rustflags_normal_source_with_target() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .file("tests/c.rs", "#[test] fn f() { }") - .file( - "benches/d.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run1(_ben: &mut test::Bencher) { } - "#, - ) - .build(); - - let host = &rustc_host(); - - // Use RUSTFLAGS to pass an argument that will generate an error - p.cargo("build --lib --target") - .arg(host) - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --bin=a --target") - .arg(host) - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --example=b --target") - .arg(host) - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("test --target") - .arg(host) - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("bench --target") - .arg(host) - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -#[cargo_test] -fn env_rustflags_build_script_with_target() { - // RUSTFLAGS should not be passed to rustc for build scripts - // when --target is specified. - // In this test if --cfg foo is passed the build will fail. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { } - #[cfg(foo)] - fn main() { } - "#, - ) - .build(); - - let host = rustc_host(); - p.cargo("build --target") - .arg(host) - .env("RUSTFLAGS", "--cfg foo") - .run(); -} - -#[cargo_test] -fn env_rustflags_build_script_dep_with_target() { - // RUSTFLAGS should not be passed to rustc for build scripts - // when --target is specified. - // In this test if --cfg foo is passed the build will fail. 
- let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - build = "build.rs" - - [build-dependencies.bar] - path = "../bar" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file( - "src/lib.rs", - r#" - fn bar() { } - #[cfg(foo)] - fn bar() { } - "#, - ) - .build(); - - let host = rustc_host(); - foo.cargo("build --target") - .arg(host) - .env("RUSTFLAGS", "--cfg foo") - .run(); -} - -#[cargo_test] -fn env_rustflags_plugin_with_target() { - // RUSTFLAGS should not be passed to rustc for plugins - // when --target is specified. - // In this test if --cfg foo is passed the build will fail. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [lib] - name = "foo" - plugin = true - "#, - ) - .file( - "src/lib.rs", - r#" - fn main() { } - #[cfg(foo)] - fn main() { } - "#, - ) - .build(); - - let host = rustc_host(); - p.cargo("build --target") - .arg(host) - .env("RUSTFLAGS", "--cfg foo") - .run(); -} - -#[cargo_test] -fn env_rustflags_plugin_dep_with_target() { - // RUSTFLAGS should not be passed to rustc for plugins - // when --target is specified. - // In this test if --cfg foo is passed the build will fail. - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [lib] - name = "foo" - plugin = true - - [dependencies.bar] - path = "../bar" - "#, - ) - .file("src/lib.rs", "fn foo() {}") - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_lib_manifest("bar")) - .file( - "src/lib.rs", - r#" - fn bar() { } - #[cfg(foo)] - fn bar() { } - "#, - ) - .build(); - - let host = rustc_host(); - foo.cargo("build --target") - .arg(host) - .env("RUSTFLAGS", "--cfg foo") - .run(); -} - -#[cargo_test] -fn env_rustflags_recompile() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("build").run(); - // Setting RUSTFLAGS forces a recompile - p.cargo("build") - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -#[cargo_test] -fn env_rustflags_recompile2() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); - // Setting RUSTFLAGS forces a recompile - p.cargo("build") - .env("RUSTFLAGS", "-Z bogus") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -#[cargo_test] -fn env_rustflags_no_recompile() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); - p.cargo("build") - .env("RUSTFLAGS", "--cfg foo") - .with_stdout("") - .run(); -} - -#[cargo_test] -fn build_rustflags_normal_source() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .file("tests/c.rs", "#[test] fn f() { }") - .file( - "benches/d.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run1(_ben: &mut test::Bencher) { } - "#, - ) - .file( - ".cargo/config", - r#" - [build] - rustflags = ["-Z", "bogus"] - "#, - ) - .build(); - - p.cargo("build --lib") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --bin=a") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --example=b") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("test") - .with_status(101) - 
.with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("bench") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -#[cargo_test] -fn build_rustflags_build_script() { - // RUSTFLAGS should be passed to rustc for build scripts - // when --target is not specified. - // In this test if --cfg foo is passed the build will fail. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { } - #[cfg(not(foo))] - fn main() { } - "#, - ) - .file( - ".cargo/config", - r#" - [build] - rustflags = ["--cfg", "foo"] - "#, - ) - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn build_rustflags_build_script_dep() { - // RUSTFLAGS should be passed to rustc for build scripts - // when --target is not specified. - // In this test if --cfg foo is not passed the build will fail. - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - build = "build.rs" - - [build-dependencies.bar] - path = "../bar" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [build] - rustflags = ["--cfg", "foo"] - "#, - ) - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file( - "src/lib.rs", - r#" - fn bar() { } - #[cfg(not(foo))] - fn bar() { } - "#, - ) - .build(); - - foo.cargo("build").run(); -} - -#[cargo_test] -fn build_rustflags_plugin() { - // RUSTFLAGS should be passed to rustc for plugins - // when --target is not specified. - // In this test if --cfg foo is not passed the build will fail. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [lib] - name = "foo" - plugin = true - "#, - ) - .file( - "src/lib.rs", - r#" - fn main() { } - #[cfg(not(foo))] - fn main() { } - "#, - ) - .file( - ".cargo/config", - r#" - [build] - rustflags = ["--cfg", "foo"] - "#, - ) - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn build_rustflags_plugin_dep() { - // RUSTFLAGS should be passed to rustc for plugins - // when --target is not specified. - // In this test if --cfg foo is not passed the build will fail. 
- let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [lib] - name = "foo" - plugin = true - - [dependencies.bar] - path = "../bar" - "#, - ) - .file("src/lib.rs", "fn foo() {}") - .file( - ".cargo/config", - r#" - [build] - rustflags = ["--cfg", "foo"] - "#, - ) - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_lib_manifest("bar")) - .file( - "src/lib.rs", - r#" - fn bar() { } - #[cfg(not(foo))] - fn bar() { } - "#, - ) - .build(); - - foo.cargo("build").run(); -} - -#[cargo_test] -fn build_rustflags_normal_source_with_target() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .file("tests/c.rs", "#[test] fn f() { }") - .file( - "benches/d.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run1(_ben: &mut test::Bencher) { } - "#, - ) - .file( - ".cargo/config", - r#" - [build] - rustflags = ["-Z", "bogus"] - "#, - ) - .build(); - - let host = &rustc_host(); - - // Use RUSTFLAGS to pass an argument that will generate an error - p.cargo("build --lib --target") - .arg(host) - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --bin=a --target") - .arg(host) - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --example=b --target") - .arg(host) - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("test --target") - .arg(host) - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("bench --target") - .arg(host) - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -#[cargo_test] -fn build_rustflags_build_script_with_target() { - // RUSTFLAGS should not be passed to rustc for build scripts - // when --target is specified. - // In this test if --cfg foo is passed the build will fail. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - build = "build.rs" - "#, - ) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - fn main() { } - #[cfg(foo)] - fn main() { } - "#, - ) - .file( - ".cargo/config", - r#" - [build] - rustflags = ["--cfg", "foo"] - "#, - ) - .build(); - - let host = rustc_host(); - p.cargo("build --target").arg(host).run(); -} - -#[cargo_test] -fn build_rustflags_build_script_dep_with_target() { - // RUSTFLAGS should not be passed to rustc for build scripts - // when --target is specified. - // In this test if --cfg foo is passed the build will fail. - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - build = "build.rs" - - [build-dependencies.bar] - path = "../bar" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [build] - rustflags = ["--cfg", "foo"] - "#, - ) - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file( - "src/lib.rs", - r#" - fn bar() { } - #[cfg(foo)] - fn bar() { } - "#, - ) - .build(); - - let host = rustc_host(); - foo.cargo("build --target").arg(host).run(); -} - -#[cargo_test] -fn build_rustflags_plugin_with_target() { - // RUSTFLAGS should not be passed to rustc for plugins - // when --target is specified. - // In this test if --cfg foo is passed the build will fail. 
- let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [lib] - name = "foo" - plugin = true - "#, - ) - .file( - "src/lib.rs", - r#" - fn main() { } - #[cfg(foo)] - fn main() { } - "#, - ) - .file( - ".cargo/config", - r#" - [build] - rustflags = ["--cfg", "foo"] - "#, - ) - .build(); - - let host = rustc_host(); - p.cargo("build --target").arg(host).run(); -} - -#[cargo_test] -fn build_rustflags_plugin_dep_with_target() { - // RUSTFLAGS should not be passed to rustc for plugins - // when --target is specified. - // In this test if --cfg foo is passed the build will fail. - let foo = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [lib] - name = "foo" - plugin = true - - [dependencies.bar] - path = "../bar" - "#, - ) - .file("src/lib.rs", "fn foo() {}") - .file( - ".cargo/config", - r#" - [build] - rustflags = ["--cfg", "foo"] - "#, - ) - .build(); - let _bar = project() - .at("bar") - .file("Cargo.toml", &basic_lib_manifest("bar")) - .file( - "src/lib.rs", - r#" - fn bar() { } - #[cfg(foo)] - fn bar() { } - "#, - ) - .build(); - - let host = rustc_host(); - foo.cargo("build --target").arg(host).run(); -} - -#[cargo_test] -fn build_rustflags_recompile() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("build").run(); - - // Setting RUSTFLAGS forces a recompile - let config = r#" - [build] - rustflags = ["-Z", "bogus"] - "#; - let config_file = paths::root().join("foo/.cargo/config"); - fs::create_dir_all(config_file.parent().unwrap()).unwrap(); - fs::write(config_file, config).unwrap(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -#[cargo_test] -fn build_rustflags_recompile2() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); - - // Setting RUSTFLAGS forces a recompile - let config = r#" - [build] - rustflags = ["-Z", "bogus"] - "#; - let config_file = paths::root().join("foo/.cargo/config"); - fs::create_dir_all(config_file.parent().unwrap()).unwrap(); - fs::write(config_file, config).unwrap(); - - p.cargo("build") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -#[cargo_test] -fn build_rustflags_no_recompile() { - let p = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - rustflags = ["--cfg", "foo"] - "#, - ) - .build(); - - p.cargo("build").env("RUSTFLAGS", "--cfg foo").run(); - p.cargo("build") - .env("RUSTFLAGS", "--cfg foo") - .with_stdout("") - .run(); -} - -#[cargo_test] -fn build_rustflags_with_home_config() { - // We need a config file inside the home directory - let home = paths::home(); - let home_config = home.join(".cargo"); - fs::create_dir(&home_config).unwrap(); - fs::write( - &home_config.join("config"), - r#" - [build] - rustflags = ["-Cllvm-args=-x86-asm-syntax=intel"] - "#, - ) - .unwrap(); - - // And we need the project to be inside the home directory - // so the walking process finds the home project twice. 
- let p = project_in_home("foo").file("src/lib.rs", "").build(); - - p.cargo("build -v").run(); -} - -#[cargo_test] -fn target_rustflags_normal_source() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .file("tests/c.rs", "#[test] fn f() { }") - .file( - "benches/d.rs", - r#" - #![feature(test)] - extern crate test; - #[bench] fn run1(_ben: &mut test::Bencher) { } - "#, - ) - .file( - ".cargo/config", - &format!( - " - [target.{}] - rustflags = [\"-Z\", \"bogus\"] - ", - rustc_host() - ), - ) - .build(); - - p.cargo("build --lib") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --bin=a") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --example=b") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("test") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("bench") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -// target.{}.rustflags takes precedence over build.rustflags -#[cargo_test] -fn target_rustflags_precedence() { - let p = project() - .file("src/lib.rs", "") - .file("src/bin/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .file("tests/c.rs", "#[test] fn f() { }") - .file( - ".cargo/config", - &format!( - " - [build] - rustflags = [\"--cfg\", \"foo\"] - - [target.{}] - rustflags = [\"-Z\", \"bogus\"] - ", - rustc_host() - ), - ) - .build(); - - p.cargo("build --lib") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --bin=a") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("build --example=b") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("test") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); - p.cargo("bench") - .with_status(101) - .with_stderr_contains("[..]bogus[..]") - .run(); -} - -#[cargo_test] -fn cfg_rustflags_normal_source() { - let p = project() - .file("src/lib.rs", "pub fn t() {}") - .file("src/bin/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .file("tests/c.rs", "#[test] fn f() { }") - .file( - ".cargo/config", - &format!( - r#" - [target.'cfg({})'] - rustflags = ["--cfg", "bar"] - "#, - if rustc_host().contains("-windows-") { - "windows" - } else { - "not(windows)" - } - ), - ) - .build(); - - p.cargo("build --lib -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg bar[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build --bin=a -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg bar[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build --example=b -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg bar[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("test --no-run -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg bar[..]` -[RUNNING] `rustc [..] --cfg bar[..]` -[RUNNING] `rustc [..] --cfg bar[..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("bench --no-run -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg bar[..]` -[RUNNING] `rustc [..] --cfg bar[..]` -[RUNNING] `rustc [..] 
--cfg bar[..]` -[FINISHED] bench [optimized] target(s) in [..] -", - ) - .run(); -} - -// target.'cfg(...)'.rustflags takes precedence over build.rustflags -#[cargo_test] -fn cfg_rustflags_precedence() { - let p = project() - .file("src/lib.rs", "pub fn t() {}") - .file("src/bin/a.rs", "fn main() {}") - .file("examples/b.rs", "fn main() {}") - .file("tests/c.rs", "#[test] fn f() { }") - .file( - ".cargo/config", - &format!( - r#" - [build] - rustflags = ["--cfg", "foo"] - - [target.'cfg({})'] - rustflags = ["--cfg", "bar"] - "#, - if rustc_host().contains("-windows-") { - "windows" - } else { - "not(windows)" - } - ), - ) - .build(); - - p.cargo("build --lib -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg bar[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build --bin=a -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg bar[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build --example=b -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg bar[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("test --no-run -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg bar[..]` -[RUNNING] `rustc [..] --cfg bar[..]` -[RUNNING] `rustc [..] --cfg bar[..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("bench --no-run -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg bar[..]` -[RUNNING] `rustc [..] --cfg bar[..]` -[RUNNING] `rustc [..] --cfg bar[..]` -[FINISHED] bench [optimized] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn target_rustflags_string_and_array_form1() { - let p1 = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - rustflags = ["--cfg", "foo"] - "#, - ) - .build(); - - p1.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg foo[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - let p2 = project() - .file("src/lib.rs", "") - .file( - ".cargo/config", - r#" - [build] - rustflags = "--cfg foo" - "#, - ) - .build(); - - p2.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg foo[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn target_rustflags_string_and_array_form2() { - let p1 = project() - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - rustflags = ["--cfg", "foo"] - "#, - rustc_host() - ), - ) - .file("src/lib.rs", "") - .build(); - - p1.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg foo[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - let p2 = project() - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - rustflags = "--cfg foo" - "#, - rustc_host() - ), - ) - .file("src/lib.rs", "") - .build(); - - p2.cargo("build -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] --cfg foo[..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-",
-        )
-        .run();
-}
-
-#[cargo_test]
-fn two_matching_in_config() {
-    let p1 = project()
-        .file(
-            ".cargo/config",
-            r#"
-            [target.'cfg(unix)']
-            rustflags = ["--cfg", 'foo="a"']
-            [target.'cfg(windows)']
-            rustflags = ["--cfg", 'foo="a"']
-            [target.'cfg(target_pointer_width = "32")']
-            rustflags = ["--cfg", 'foo="b"']
-            [target.'cfg(target_pointer_width = "64")']
-            rustflags = ["--cfg", 'foo="b"']
-            "#,
-        )
-        .file(
-            "src/main.rs",
-            r#"
-            fn main() {
-                if cfg!(foo = "a") {
-                    println!("a");
-                } else if cfg!(foo = "b") {
-                    println!("b");
-                } else {
-                    panic!()
-                }
-            }
-            "#,
-        )
-        .build();
-
-    p1.cargo("run").run();
-    p1.cargo("build").with_stderr("[FINISHED] [..]").run();
-}
-
-#[cargo_test]
-fn env_rustflags_misspelled() {
-    let p = project().file("src/main.rs", "fn main() { }").build();
-
-    for cmd in &["check", "build", "run", "test", "bench"] {
-        p.cargo(cmd)
-            .env("RUST_FLAGS", "foo")
-            .with_stderr_contains("[WARNING] Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?")
-            .run();
-    }
-}
-
-#[cargo_test]
-fn env_rustflags_misspelled_build_script() {
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-            [package]
-            name = "foo"
-            version = "0.0.1"
-            build = "build.rs"
-            "#,
-        )
-        .file("src/lib.rs", "")
-        .file("build.rs", "fn main() { }")
-        .build();
-
-    p.cargo("build")
-        .env("RUST_FLAGS", "foo")
-        .with_stderr_contains("[WARNING] Cargo does not read `RUST_FLAGS` environment variable. Did you mean `RUSTFLAGS`?")
-        .run();
-}
-
-#[cargo_test]
-fn remap_path_prefix_ignored() {
-    // Ensure that --remap-path-prefix does not affect metadata hash.
-    let p = project().file("src/lib.rs", "").build();
-    p.cargo("build").run();
-    let rlibs = p
-        .glob("target/debug/deps/*.rlib")
-        .collect::<Result<Vec<_>, _>>()
-        .unwrap();
-    assert_eq!(rlibs.len(), 1);
-    p.cargo("clean").run();
-
-    let check_metadata_same = || {
-        let rlibs2 = p
-            .glob("target/debug/deps/*.rlib")
-            .collect::<Result<Vec<_>, _>>()
-            .unwrap();
-        assert_eq!(rlibs, rlibs2);
-    };
-
-    p.cargo("build")
-        .env(
-            "RUSTFLAGS",
-            "--remap-path-prefix=/abc=/zoo --remap-path-prefix /spaced=/zoo",
-        )
-        .run();
-    check_metadata_same();
-
-    p.cargo("clean").run();
-    p.cargo("rustc -- --remap-path-prefix=/abc=/zoo --remap-path-prefix /spaced=/zoo")
-        .run();
-    check_metadata_same();
-}
-
-#[cargo_test]
-fn remap_path_prefix_works() {
-    // Check that remap-path-prefix works.
-    Package::new("bar", "0.1.0")
-        .file("src/lib.rs", "pub fn f() -> &'static str { file!() }")
-        .publish();
-
-    let p = project()
-        .file(
-            "Cargo.toml",
-            r#"
-            [package]
-            name = "foo"
-            version = "0.1.0"
-
-            [dependencies]
-            bar = "0.1"
-            "#,
-        )
-        .file(
-            "src/main.rs",
-            r#"
-            fn main() {
-                println!("{}", bar::f());
-            }
-            "#,
-        )
-        .build();
-
-    p.cargo("run")
-        .env(
-            "RUSTFLAGS",
-            format!("--remap-path-prefix={}=/foo", paths::root().display()),
-        )
-        .with_stdout("/foo/home/.cargo/registry/src/[..]/bar-0.1.0/src/lib.rs")
-        .run();
-}
diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/search.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/search.rs
deleted file mode 100644
index abb709ebc..000000000
--- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/search.rs
+++ /dev/null
@@ -1,192 +0,0 @@
-//! Tests for the `cargo search` command.
- -use cargo_test_support::cargo_process; -use cargo_test_support::git::repo; -use cargo_test_support::paths; -use cargo_test_support::registry::{api_path, registry_path, registry_url}; -use std::collections::HashSet; -use std::fs; -use std::path::Path; -use url::Url; - -fn api() -> Url { - Url::from_file_path(&*api_path()).ok().unwrap() -} - -fn write_crates(dest: &Path) { - let content = r#"{ - "crates": [{ - "created_at": "2014-11-16T20:17:35Z", - "description": "Design by contract style assertions for Rust", - "documentation": null, - "downloads": 2, - "homepage": null, - "id": "hoare", - "keywords": [], - "license": null, - "links": { - "owners": "/api/v1/crates/hoare/owners", - "reverse_dependencies": "/api/v1/crates/hoare/reverse_dependencies", - "version_downloads": "/api/v1/crates/hoare/downloads", - "versions": "/api/v1/crates/hoare/versions" - }, - "max_version": "0.1.1", - "name": "hoare", - "repository": "https://github.com/nick29581/libhoare", - "updated_at": "2014-11-20T21:49:21Z", - "versions": null - }, - { - "id": "postgres", - "name": "postgres", - "updated_at": "2020-05-01T23:17:54.335921+00:00", - "versions": null, - "keywords": null, - "categories": null, - "badges": [ - { - "badge_type": "circle-ci", - "attributes": { - "repository": "sfackler/rust-postgres", - "branch": null - } - } - ], - "created_at": "2014-11-24T02:34:44.756689+00:00", - "downloads": 535491, - "recent_downloads": 88321, - "max_version": "0.17.3", - "newest_version": "0.17.3", - "description": "A native, synchronous PostgreSQL client", - "homepage": null, - "documentation": null, - "repository": "https://github.com/sfackler/rust-postgres", - "links": { - "version_downloads": "/api/v1/crates/postgres/downloads", - "versions": "/api/v1/crates/postgres/versions", - "owners": "/api/v1/crates/postgres/owners", - "owner_team": "/api/v1/crates/postgres/owner_team", - "owner_user": "/api/v1/crates/postgres/owner_user", - "reverse_dependencies": "/api/v1/crates/postgres/reverse_dependencies" - }, - "exact_match": true - } - ], - "meta": { - "total": 2 - } - }"#; - - // Older versions of curl don't peel off query parameters when looking for - // filenames, so just make both files. - // - // On windows, though, `?` is an invalid character, but we always build curl - // from source there anyway! 
- fs::write(&dest, content).unwrap(); - if !cfg!(windows) { - fs::write( - &dest.with_file_name("crates?q=postgres&per_page=10"), - content, - ) - .unwrap(); - } -} - -const SEARCH_RESULTS: &str = "\ -hoare = \"0.1.1\" # Design by contract style assertions for Rust -postgres = \"0.17.3\" # A native, synchronous PostgreSQL client -"; - -fn setup() { - let cargo_home = paths::root().join(".cargo"); - fs::create_dir_all(cargo_home).unwrap(); - fs::create_dir_all(&api_path().join("api/v1")).unwrap(); - - // Init a new registry - let _ = repo(®istry_path()) - .file( - "config.json", - &format!(r#"{{"dl":"{0}","api":"{0}"}}"#, api()), - ) - .build(); - - let base = api_path().join("api/v1/crates"); - write_crates(&base); -} - -fn set_cargo_config() { - let config = paths::root().join(".cargo/config"); - - fs::write( - &config, - format!( - r#" - [source.crates-io] - registry = 'https://wut' - replace-with = 'dummy-registry' - - [source.dummy-registry] - registry = '{reg}' - "#, - reg = registry_url(), - ), - ) - .unwrap(); -} - -#[cargo_test] -fn not_update() { - setup(); - set_cargo_config(); - - use cargo::core::{Shell, Source, SourceId}; - use cargo::sources::RegistrySource; - use cargo::util::Config; - - let sid = SourceId::for_registry(®istry_url()).unwrap(); - let cfg = Config::new( - Shell::from_write(Box::new(Vec::new())), - paths::root(), - paths::home().join(".cargo"), - ); - let lock = cfg.acquire_package_cache_lock().unwrap(); - let mut regsrc = RegistrySource::remote(sid, &HashSet::new(), &cfg); - regsrc.update().unwrap(); - drop(lock); - - cargo_process("search postgres") - .with_stdout_contains(SEARCH_RESULTS) - .with_stderr("") // without "Updating ... index" - .run(); -} - -#[cargo_test] -fn replace_default() { - setup(); - set_cargo_config(); - - cargo_process("search postgres") - .with_stdout_contains(SEARCH_RESULTS) - .with_stderr_contains("[..]Updating [..] index") - .run(); -} - -#[cargo_test] -fn simple() { - setup(); - - cargo_process("search postgres --index") - .arg(registry_url().to_string()) - .with_stdout_contains(SEARCH_RESULTS) - .run(); -} - -#[cargo_test] -fn multiple_query_params() { - setup(); - - cargo_process("search postgres sql --index") - .arg(registry_url().to_string()) - .with_stdout_contains(SEARCH_RESULTS) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/shell_quoting.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/shell_quoting.rs deleted file mode 100644 index a45f8c6a0..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/shell_quoting.rs +++ /dev/null @@ -1,37 +0,0 @@ -//! This file tests that when the commands being run are shown -//! in the output, their arguments are quoted properly -//! so that the command can be run in a terminal. - -use cargo_test_support::project; - -#[cargo_test] -fn features_are_quoted() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = ["mikeyhew@example.com"] - - [features] - some_feature = [] - default = ["some_feature"] - "#, - ) - .file("src/main.rs", "fn main() {error}") - .build(); - - p.cargo("check -v") - .env("MSYSTEM", "1") - .with_status(101) - .with_stderr_contains( - r#"[RUNNING] `rustc [..] --cfg 'feature="default"' --cfg 'feature="some_feature"' [..]`"# - ).with_stderr_contains( - r#" -Caused by: - process didn't exit successfully: [..] 
--cfg 'feature="default"' --cfg 'feature="some_feature"' [..]"#
-        )
-        .run();
-}
diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/standard_lib.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/standard_lib.rs
deleted file mode 100644
index 5e2b57193..000000000
--- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/standard_lib.rs
+++ /dev/null
@@ -1,692 +0,0 @@
-//! Tests for building the standard library (-Zbuild-std).
-//!
-//! These tests all use a "mock" standard library so that we don't have to
-//! rebuild the real one. There is a separate integration test `build-std`
-//! which builds the real thing, but that should be avoided if possible.
-
-use cargo_test_support::registry::{Dependency, Package};
-use cargo_test_support::ProjectBuilder;
-use cargo_test_support::{is_nightly, paths, project, rustc_host, Execs};
-use std::path::{Path, PathBuf};
-
-struct Setup {
-    rustc_wrapper: PathBuf,
-    real_sysroot: String,
-}
-
-fn setup() -> Option<Setup> {
-    if !is_nightly() {
-        // -Zbuild-std is nightly
-        // We don't want these tests to run on rust-lang/rust.
-        return None;
-    }
-
-    if cfg!(all(target_os = "windows", target_env = "gnu")) {
-        // FIXME: contains object files that we don't handle yet:
-        // https://github.com/rust-lang/wg-cargo-std-aware/issues/46
-        return None;
-    }
-
-    // Our mock sysroot requires a few packages from crates.io, so make sure
-    // they're "published" to crates.io. Also edit their code a bit to make sure
-    // that they have access to our custom crates with custom apis.
-    Package::new("registry-dep-using-core", "1.0.0")
-        .file(
-            "src/lib.rs",
-            "
-            #![no_std]
-
-            #[cfg(feature = \"mockbuild\")]
-            pub fn custom_api() {
-            }
-
-            #[cfg(not(feature = \"mockbuild\"))]
-            pub fn non_sysroot_api() {
-                core::custom_api();
-            }
-            ",
-        )
-        .add_dep(Dependency::new("rustc-std-workspace-core", "*").optional(true))
-        .feature("mockbuild", &["rustc-std-workspace-core"])
-        .publish();
-    Package::new("registry-dep-using-alloc", "1.0.0")
-        .file(
-            "src/lib.rs",
-            "
-            #![no_std]
-
-            extern crate alloc;
-
-            #[cfg(feature = \"mockbuild\")]
-            pub fn custom_api() {
-            }
-
-            #[cfg(not(feature = \"mockbuild\"))]
-            pub fn non_sysroot_api() {
-                core::custom_api();
-                alloc::custom_api();
-            }
-            ",
-        )
-        .add_dep(Dependency::new("rustc-std-workspace-core", "*").optional(true))
-        .add_dep(Dependency::new("rustc-std-workspace-alloc", "*").optional(true))
-        .feature(
-            "mockbuild",
-            &["rustc-std-workspace-core", "rustc-std-workspace-alloc"],
-        )
-        .publish();
-    Package::new("registry-dep-using-std", "1.0.0")
-        .file(
-            "src/lib.rs",
-            "
-            #[cfg(feature = \"mockbuild\")]
-            pub fn custom_api() {
-            }
-
-            #[cfg(not(feature = \"mockbuild\"))]
-            pub fn non_sysroot_api() {
-                std::custom_api();
-            }
-            ",
-        )
-        .add_dep(Dependency::new("rustc-std-workspace-std", "*").optional(true))
-        .feature("mockbuild", &["rustc-std-workspace-std"])
-        .publish();
-
-    let p = ProjectBuilder::new(paths::root().join("rustc-wrapper"))
-        .file(
-            "src/main.rs",
-            r#"
-            use std::process::Command;
-            use std::env;
-            fn main() {
-                let mut args = env::args().skip(1).collect::<Vec<_>>();
-
-                let is_sysroot_crate = env::var_os("RUSTC_BOOTSTRAP").is_some();
-                if is_sysroot_crate {
-                    args.push("--sysroot".to_string());
-                    args.push(env::var("REAL_SYSROOT").unwrap());
-                } else if args.iter().any(|arg| arg == "--target") {
-                    // build-std target unit
-                    args.push("--sysroot".to_string());
-                    args.push("/path/to/nowhere".to_string());
-                } else {
-                    // host unit, do not use sysroot
-                }
-
-                let ret =
Command::new(&args[0]).args(&args[1..]).status().unwrap(); - std::process::exit(ret.code().unwrap_or(1)); - } - "#, - ) - .build(); - p.cargo("build").run(); - - Some(Setup { - rustc_wrapper: p.bin("foo"), - real_sysroot: paths::sysroot(), - }) -} - -fn enable_build_std(e: &mut Execs, setup: &Setup) { - // First up, force Cargo to use our "mock sysroot" which mimics what - // libstd looks like upstream. - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/testsuite/mock-std"); - e.env("__CARGO_TESTS_ONLY_SRC_ROOT", &root); - - e.masquerade_as_nightly_cargo(); - - // We do various shenanigans to ensure our "mock sysroot" actually links - // with the real sysroot, so we don't have to actually recompile std for - // each test. Perform all that logic here, namely: - // - // * RUSTC_WRAPPER - uses our shim executable built above to control rustc - // * REAL_SYSROOT - used by the shim executable to swap out to the real - // sysroot temporarily for some compilations - // * RUST{,DOC}FLAGS - an extra `-L` argument to ensure we can always load - // crates from the sysroot, but only indirectly through other crates. - e.env("RUSTC_WRAPPER", &setup.rustc_wrapper); - e.env("REAL_SYSROOT", &setup.real_sysroot); - let libdir = format!("/lib/rustlib/{}/lib", rustc_host()); - e.env( - "RUSTFLAGS", - format!("-Ldependency={}{}", setup.real_sysroot, libdir), - ); - e.env( - "RUSTDOCFLAGS", - format!("-Ldependency={}{}", setup.real_sysroot, libdir), - ); -} - -// Helper methods used in the tests below -trait BuildStd: Sized { - fn build_std(&mut self, setup: &Setup) -> &mut Self; - fn build_std_arg(&mut self, setup: &Setup, arg: &str) -> &mut Self; - fn target_host(&mut self) -> &mut Self; -} - -impl BuildStd for Execs { - fn build_std(&mut self, setup: &Setup) -> &mut Self { - enable_build_std(self, setup); - self.arg("-Zbuild-std"); - self - } - - fn build_std_arg(&mut self, setup: &Setup, arg: &str) -> &mut Self { - enable_build_std(self, setup); - self.arg(format!("-Zbuild-std={}", arg)); - self - } - - fn target_host(&mut self) -> &mut Self { - self.arg("--target").arg(rustc_host()); - self - } -} - -#[cargo_test] -fn basic() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - - let p = project() - .file( - "src/main.rs", - " - fn main() { - std::custom_api(); - foo::f(); - } - - #[test] - fn smoke_bin_unit() { - std::custom_api(); - foo::f(); - } - ", - ) - .file( - "src/lib.rs", - " - extern crate alloc; - extern crate proc_macro; - - /// ``` - /// foo::f(); - /// ``` - pub fn f() { - core::custom_api(); - std::custom_api(); - alloc::custom_api(); - proc_macro::custom_api(); - } - - #[test] - fn smoke_lib_unit() { - std::custom_api(); - f(); - } - ", - ) - .file( - "tests/smoke.rs", - " - #[test] - fn smoke_integration() { - std::custom_api(); - foo::f(); - } - ", - ) - .build(); - - p.cargo("check -v").build_std(&setup).target_host().run(); - p.cargo("build").build_std(&setup).target_host().run(); - p.cargo("run").build_std(&setup).target_host().run(); - p.cargo("test").build_std(&setup).target_host().run(); -} - -#[cargo_test] -fn simple_lib_std() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project().file("src/lib.rs", "").build(); - p.cargo("build -v") - .build_std(&setup) - .target_host() - .with_stderr_contains("[RUNNING] `[..]--crate-name std [..]`") - .run(); - // Check freshness. 
- p.change_file("src/lib.rs", " "); - p.cargo("build -v") - .build_std(&setup) - .target_host() - .with_stderr_contains("[FRESH] std[..]") - .run(); -} - -#[cargo_test] -fn simple_bin_std() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("run -v").build_std(&setup).target_host().run(); -} - -#[cargo_test] -fn lib_nostd() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - r#" - #![no_std] - pub fn foo() { - assert_eq!(u8::MIN, 0); - } - "#, - ) - .build(); - p.cargo("build -v --lib") - .build_std_arg(&setup, "core") - .target_host() - .with_stderr_does_not_contain("[..]libstd[..]") - .run(); -} - -#[cargo_test] -fn check_core() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file("src/lib.rs", "#![no_std] fn unused_fn() {}") - .build(); - - p.cargo("check -v") - .build_std_arg(&setup, "core") - .target_host() - .with_stderr_contains("[WARNING] [..]unused_fn[..]`") - .run(); -} - -#[cargo_test] -fn depend_same_as_std() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - - let p = project() - .file( - "src/lib.rs", - r#" - pub fn f() { - registry_dep_using_core::non_sysroot_api(); - registry_dep_using_alloc::non_sysroot_api(); - registry_dep_using_std::non_sysroot_api(); - } - "#, - ) - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [dependencies] - registry-dep-using-core = "1.0" - registry-dep-using-alloc = "1.0" - registry-dep-using-std = "1.0" - "#, - ) - .build(); - - p.cargo("build -v").build_std(&setup).target_host().run(); -} - -#[cargo_test] -fn test() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - r#" - #[cfg(test)] - mod tests { - #[test] - fn it_works() { - assert_eq!(2 + 2, 4); - } - } - "#, - ) - .build(); - - p.cargo("test -v") - .build_std(&setup) - .target_host() - .with_stdout_contains("test tests::it_works ... 
ok") - .run(); -} - -#[cargo_test] -fn target_proc_macro() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - r#" - extern crate proc_macro; - pub fn f() { - let _ts = proc_macro::TokenStream::new(); - } - "#, - ) - .build(); - - p.cargo("build -v").build_std(&setup).target_host().run(); -} - -#[cargo_test] -fn bench() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - r#" - #![feature(test)] - extern crate test; - - #[bench] - fn b1(b: &mut test::Bencher) { - b.iter(|| ()) - } - "#, - ) - .build(); - - p.cargo("bench -v").build_std(&setup).target_host().run(); -} - -#[cargo_test] -fn doc() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - r#" - /// Doc - pub fn f() -> Result<(), ()> {Ok(())} - "#, - ) - .build(); - - p.cargo("doc -v").build_std(&setup).target_host().run(); -} - -#[cargo_test] -fn check_std() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - " - extern crate core; - extern crate alloc; - extern crate proc_macro; - pub fn f() {} - ", - ) - .file("src/main.rs", "fn main() {}") - .file( - "tests/t1.rs", - r#" - #[test] - fn t1() { - assert_eq!(1, 2); - } - "#, - ) - .build(); - - p.cargo("check -v --all-targets") - .build_std(&setup) - .target_host() - .run(); - p.cargo("check -v --all-targets --profile=test") - .build_std(&setup) - .target_host() - .run(); -} - -#[cargo_test] -fn doctest() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - r#" - /// Doc - /// ``` - /// std::custom_api(); - /// ``` - pub fn f() {} - "#, - ) - .build(); - - p.cargo("test --doc -v -Zdoctest-xcompile") - .build_std(&setup) - .with_stdout_contains("test src/lib.rs - f [..] ... ok") - .target_host() - .run(); -} - -#[cargo_test] -fn no_implicit_alloc() { - // Demonstrate that alloc is not implicitly in scope. - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - r#" - pub fn f() { - let _: Vec = alloc::vec::Vec::new(); - } - "#, - ) - .build(); - - p.cargo("build -v") - .build_std(&setup) - .target_host() - .with_stderr_contains("[..]use of undeclared [..]`alloc`") - .with_status(101) - .run(); -} - -#[cargo_test] -fn macro_expanded_shadow() { - // This tests a bug caused by the previous use of `--extern` to directly - // load sysroot crates. This necessitated the switch to `--sysroot` to - // retain existing behavior. See - // https://github.com/rust-lang/wg-cargo-std-aware/issues/40 for more - // detail. - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - r#" - macro_rules! a { - () => (extern crate std as alloc;) - } - a!(); - "#, - ) - .build(); - - p.cargo("build -v").build_std(&setup).target_host().run(); -} - -#[cargo_test] -fn ignores_incremental() { - // Incremental is not really needed for std, make sure it is disabled. - // Incremental also tends to have bugs that affect std libraries more than - // any other crate. 
- let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project().file("src/lib.rs", "").build(); - p.cargo("build") - .env("CARGO_INCREMENTAL", "1") - .build_std(&setup) - .target_host() - .run(); - let incremental: Vec<_> = p - .glob(format!("target/{}/debug/incremental/*", rustc_host())) - .map(|e| e.unwrap()) - .collect(); - assert_eq!(incremental.len(), 1); - assert!(incremental[0] - .file_name() - .unwrap() - .to_str() - .unwrap() - .starts_with("foo-")); -} - -#[cargo_test] -fn cargo_config_injects_compiler_builtins() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - r#" - #![no_std] - pub fn foo() { - assert_eq!(u8::MIN, 0); - } - "#, - ) - .file( - ".cargo/config.toml", - r#" - [unstable] - build-std = ['core'] - "#, - ) - .build(); - let mut build = p.cargo("build -v --lib"); - enable_build_std(&mut build, &setup); - build - .target_host() - .with_stderr_does_not_contain("[..]libstd[..]") - .run(); -} - -#[cargo_test] -fn different_features() { - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "src/lib.rs", - " - pub fn foo() { - std::conditional_function(); - } - ", - ) - .build(); - p.cargo("build") - .build_std(&setup) - .arg("-Zbuild-std-features=feature1") - .target_host() - .run(); -} - -#[cargo_test] -fn no_roots() { - // Checks for a bug where it would panic if there are no roots. - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project().file("tests/t1.rs", "").build(); - p.cargo("build") - .build_std(&setup) - .target_host() - .with_stderr_contains("[FINISHED] [..]") - .run(); -} - -#[cargo_test] -fn proc_macro_only() { - // Checks for a bug where it would panic if building a proc-macro only - let setup = match setup() { - Some(s) => s, - None => return, - }; - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "pm" - version = "0.1.0" - - [lib] - proc-macro = true - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("build") - .build_std(&setup) - .target_host() - .with_stderr_contains("[FINISHED] [..]") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/test.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/test.rs deleted file mode 100644 index eb66bc77c..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/test.rs +++ /dev/null @@ -1,4457 +0,0 @@ -//! Tests for the `cargo test` command. - -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::Package; -use cargo_test_support::{ - basic_bin_manifest, basic_lib_manifest, basic_manifest, cargo_exe, project, -}; -use cargo_test_support::{cross_compile, is_nightly, paths}; -use cargo_test_support::{rustc_host, sleep_ms}; -use std::fs; - -#[cargo_test] -fn cargo_test_simple() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/main.rs", - r#" - fn hello() -> &'static str { - "hello" - } - - pub fn main() { - println!("{}", hello()) - } - - #[test] - fn test_hello() { - assert_eq!(hello(), "hello") - } - "#, - ) - .build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("hello\n").run(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("test test_hello ... 
ok") - .run(); -} - -#[cargo_test] -fn cargo_test_release() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.1.0" - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate bar; - pub fn foo() { bar::bar(); } - - #[test] - fn test() { foo(); } - "#, - ) - .file( - "tests/test.rs", - r#" - extern crate foo; - - #[test] - fn test() { foo::foo(); } - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .build(); - - p.cargo("test -v --release") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[RUNNING] [..] -C opt-level=3 [..] -[COMPILING] foo v0.1.0 ([CWD]) -[RUNNING] [..] -C opt-level=3 [..] -[RUNNING] [..] -C opt-level=3 [..] -[RUNNING] [..] -C opt-level=3 [..] -[FINISHED] release [optimized] target(s) in [..] -[RUNNING] `[..]target/release/deps/foo-[..][EXE]` -[RUNNING] `[..]target/release/deps/test-[..][EXE]` -[DOCTEST] foo -[RUNNING] `rustdoc [..]--test [..]lib.rs[..]`", - ) - .with_stdout_contains_n("test test ... ok", 2) - .with_stdout_contains("running 0 tests") - .run(); -} - -#[cargo_test] -fn cargo_test_overflow_checks() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.5.0" - authors = [] - - [[bin]] - name = "foo" - - [profile.release] - overflow-checks = true - "#, - ) - .file( - "src/foo.rs", - r#" - use std::panic; - pub fn main() { - let r = panic::catch_unwind(|| { - [1, i32::MAX].iter().sum::(); - }); - assert!(r.is_err()); - } - "#, - ) - .build(); - - p.cargo("build --release").run(); - assert!(p.release_bin("foo").is_file()); - - p.process(&p.release_bin("foo")).with_stdout("").run(); -} - -#[cargo_test] -fn cargo_test_quiet_with_harness() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [[test]] - name = "foo" - path = "src/foo.rs" - harness = true - "#, - ) - .file( - "src/foo.rs", - r#" - fn main() {} - #[test] fn test_hello() {} - "#, - ) - .build(); - - p.cargo("test -q") - .with_stdout( - " -running 1 test -. -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] - -", - ) - .with_stderr("") - .run(); -} - -#[cargo_test] -fn cargo_test_quiet_no_harness() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [[bin]] - name = "foo" - test = false - - [[test]] - name = "foo" - path = "src/main.rs" - harness = false - "#, - ) - .file( - "src/main.rs", - r#" - fn main() {} - #[test] fn test_hello() {} - "#, - ) - .build(); - - p.cargo("test -q").with_stdout("").with_stderr("").run(); -} - -#[cargo_test] -fn cargo_doc_test_quiet() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - "#, - ) - .file( - "src/lib.rs", - r#" - /// ``` - /// let result = foo::add(2, 3); - /// assert_eq!(result, 5); - /// ``` - pub fn add(a: i32, b: i32) -> i32 { - a + b - } - - /// ``` - /// let result = foo::div(10, 2); - /// assert_eq!(result, 5); - /// ``` - /// - /// # Panics - /// - /// The function panics if the second argument is zero. - /// - /// ```rust,should_panic - /// // panics on division by zero - /// foo::div(10, 0); - /// ``` - pub fn div(a: i32, b: i32) -> i32 { - if b == 0 { - panic!("Divide-by-zero error"); - } - - a / b - } - - #[test] fn test_hello() {} - "#, - ) - .build(); - - p.cargo("test -q") - .with_stdout( - " -running 1 test -. 
-test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] - - -running 3 tests -... -test result: ok. 3 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] - -", - ) - .with_stderr("") - .run(); -} - -#[cargo_test] -fn cargo_test_verbose() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/main.rs", - r#" - fn main() {} - #[test] fn test_hello() {} - "#, - ) - .build(); - - p.cargo("test -v hello") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc [..] src/main.rs [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[CWD]/target/debug/deps/foo-[..] hello` -", - ) - .with_stdout_contains("test test_hello ... ok") - .run(); -} - -#[cargo_test] -fn many_similar_names() { - let p = project() - .file( - "src/lib.rs", - " - pub fn foo() {} - #[test] fn lib_test() {} - ", - ) - .file( - "src/main.rs", - " - extern crate foo; - fn main() {} - #[test] fn bin_test() { foo::foo() } - ", - ) - .file( - "tests/foo.rs", - r#" - extern crate foo; - #[test] fn test_test() { foo::foo() } - "#, - ) - .build(); - - p.cargo("test -v") - .with_stdout_contains("test bin_test ... ok") - .with_stdout_contains("test lib_test ... ok") - .with_stdout_contains("test test_test ... ok") - .run(); -} - -#[cargo_test] -fn cargo_test_failing_test_in_bin() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file( - "src/main.rs", - r#" - fn hello() -> &'static str { - "hello" - } - - pub fn main() { - println!("{}", hello()) - } - - #[test] - fn test_hello() { - assert_eq!(hello(), "nope") - } - "#, - ) - .build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("hello\n").run(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[ERROR] test failed, to rerun pass '--bin foo'", - ) - .with_stdout_contains( - " -running 1 test -test test_hello ... FAILED - -failures: - ----- test_hello stdout ---- -[..]thread '[..]' panicked at 'assertion failed:[..]", - ) - .with_stdout_contains("[..]`(left == right)`[..]") - .with_stdout_contains("[..]left: `\"hello\"`,[..]") - .with_stdout_contains("[..]right: `\"nope\"`[..]") - .with_stdout_contains("[..]src/main.rs:12[..]") - .with_stdout_contains( - "\ -failures: - test_hello -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn cargo_test_failing_test_in_test() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/main.rs", r#"pub fn main() { println!("hello"); }"#) - .file( - "tests/footest.rs", - "#[test] fn test_hello() { assert!(false) }", - ) - .build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - - p.process(&p.bin("foo")).with_stdout("hello\n").run(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[RUNNING] [..] (target/debug/deps/footest-[..][EXE]) -[ERROR] test failed, to rerun pass '--test footest'", - ) - .with_stdout_contains("running 0 tests") - .with_stdout_contains( - "\ -running 1 test -test test_hello ... FAILED - -failures: - ----- test_hello stdout ---- -[..]thread '[..]' panicked at 'assertion failed: false', \ - tests/footest.rs:1[..] 
-", - ) - .with_stdout_contains( - "\ -failures: - test_hello -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn cargo_test_failing_test_in_lib() { - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", "#[test] fn test_hello() { assert!(false) }") - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[ERROR] test failed, to rerun pass '--lib'", - ) - .with_stdout_contains( - "\ -test test_hello ... FAILED - -failures: - ----- test_hello stdout ---- -[..]thread '[..]' panicked at 'assertion failed: false', \ - src/lib.rs:1[..] -", - ) - .with_stdout_contains( - "\ -failures: - test_hello -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn test_with_lib_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name = "baz" - path = "src/main.rs" - "#, - ) - .file( - "src/lib.rs", - r#" - /// - /// ```rust - /// extern crate foo; - /// fn main() { - /// println!("{:?}", foo::foo()); - /// } - /// ``` - /// - pub fn foo(){} - #[test] fn lib_test() {} - "#, - ) - .file( - "src/main.rs", - " - #[allow(unused_extern_crates)] - extern crate foo; - - fn main() {} - - #[test] - fn bin_test() {} - ", - ) - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[RUNNING] [..] (target/debug/deps/baz-[..][EXE]) -[DOCTEST] foo", - ) - .with_stdout_contains("test lib_test ... ok") - .with_stdout_contains("test bin_test ... ok") - .with_stdout_contains_n("test [..] ... ok", 3) - .run(); -} - -#[cargo_test] -fn test_with_deep_lib_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.bar] - path = "../bar" - "#, - ) - .file( - "src/lib.rs", - " - #[cfg(test)] - extern crate bar; - /// ``` - /// foo::foo(); - /// ``` - pub fn foo() {} - - #[test] - fn bar_test() { - bar::bar(); - } - ", - ) - .build(); - let _p2 = project() - .at("bar") - .file("Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("src/lib.rs", "pub fn bar() {} #[test] fn foo_test() {}") - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([..]) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target[..]) -[DOCTEST] foo", - ) - .with_stdout_contains("test bar_test ... ok") - .with_stdout_contains_n("test [..] ... ok", 2) - .run(); -} - -#[cargo_test] -fn external_test_explicit() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [[test]] - name = "test" - path = "src/test.rs" - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn get_hello() -> &'static str { "Hello" } - - #[test] - fn internal_test() {} - "#, - ) - .file( - "src/test.rs", - r#" - extern crate foo; - - #[test] - fn external_test() { assert_eq!(foo::get_hello(), "Hello") } - "#, - ) - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[RUNNING] [..] (target/debug/deps/test-[..][EXE]) -[DOCTEST] foo", - ) - .with_stdout_contains("test internal_test ... 
ok") - .with_stdout_contains("test external_test ... ok") - .with_stdout_contains("running 0 tests") - .run(); -} - -#[cargo_test] -fn external_test_named_test() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [[test]] - name = "test" - "#, - ) - .file("src/lib.rs", "") - .file("tests/test.rs", "#[test] fn foo() {}") - .build(); - - p.cargo("test").run(); -} - -#[cargo_test] -fn external_test_implicit() { - let p = project() - .file( - "src/lib.rs", - r#" - pub fn get_hello() -> &'static str { "Hello" } - - #[test] - fn internal_test() {} - "#, - ) - .file( - "tests/external.rs", - r#" - extern crate foo; - - #[test] - fn external_test() { assert_eq!(foo::get_hello(), "Hello") } - "#, - ) - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[RUNNING] [..] (target/debug/deps/external-[..][EXE]) -[DOCTEST] foo", - ) - .with_stdout_contains("test internal_test ... ok") - .with_stdout_contains("test external_test ... ok") - .with_stdout_contains("running 0 tests") - .run(); -} - -#[cargo_test] -fn dont_run_examples() { - let p = project() - .file("src/lib.rs", "") - .file( - "examples/dont-run-me-i-will-fail.rs", - r#" - fn main() { panic!("Examples should not be run by 'cargo test'"); } - "#, - ) - .build(); - p.cargo("test").run(); -} - -#[cargo_test] -fn pass_through_command_line() { - let p = project() - .file( - "src/lib.rs", - " - #[test] fn foo() {} - #[test] fn bar() {} - ", - ) - .build(); - - p.cargo("test bar") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -", - ) - .with_stdout_contains("running 1 test") - .with_stdout_contains("test bar ... ok") - .run(); - - p.cargo("test foo") - .with_stderr( - "\ -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -", - ) - .with_stdout_contains("running 1 test") - .with_stdout_contains("test foo ... ok") - .run(); -} - -// Regression test for running cargo-test twice with -// tests in an rlib -#[cargo_test] -fn cargo_test_twice() { - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file( - "src/foo.rs", - r#" - #![crate_type = "rlib"] - - #[test] - fn dummy_test() { } - "#, - ) - .build(); - - for _ in 0..2 { - p.cargo("test").run(); - } -} - -#[cargo_test] -fn lib_bin_same_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - [[bin]] - name = "foo" - "#, - ) - .file("src/lib.rs", "#[test] fn lib_test() {}") - .file( - "src/main.rs", - " - #[allow(unused_extern_crates)] - extern crate foo; - - #[test] - fn bin_test() {} - ", - ) - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[DOCTEST] foo", - ) - .with_stdout_contains_n("test [..] ... 
ok", 2) - .with_stdout_contains("running 0 tests") - .run(); -} - -#[cargo_test] -fn lib_with_standard_name() { - let p = project() - .file("Cargo.toml", &basic_manifest("syntax", "0.0.1")) - .file( - "src/lib.rs", - " - /// ``` - /// syntax::foo(); - /// ``` - pub fn foo() {} - - #[test] - fn foo_test() {} - ", - ) - .file( - "tests/test.rs", - " - extern crate syntax; - - #[test] - fn test() { syntax::foo() } - ", - ) - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] syntax v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/syntax-[..][EXE]) -[RUNNING] [..] (target/debug/deps/test-[..][EXE]) -[DOCTEST] syntax", - ) - .with_stdout_contains("test foo_test ... ok") - .with_stdout_contains("test test ... ok") - .with_stdout_contains_n("test [..] ... ok", 3) - .run(); -} - -#[cargo_test] -fn lib_with_standard_name2() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - name = "syntax" - test = false - doctest = false - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file( - "src/main.rs", - " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - ", - ) - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] syntax v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/syntax-[..][EXE])", - ) - .with_stdout_contains("test test ... ok") - .run(); -} - -#[cargo_test] -fn lib_without_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - test = false - doctest = false - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file( - "src/main.rs", - " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - ", - ) - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] syntax v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/syntax-[..][EXE])", - ) - .with_stdout_contains("test test ... 
ok") - .run(); -} - -#[cargo_test] -fn bin_without_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - test = false - doctest = false - - [[bin]] - path = "src/main.rs" - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file( - "src/main.rs", - " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - ", - ) - .build(); - - p.cargo("test") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - binary target bin.name is required", - ) - .run(); -} - -#[cargo_test] -fn bench_without_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - test = false - doctest = false - - [[bench]] - path = "src/bench.rs" - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file( - "src/main.rs", - " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - ", - ) - .file( - "src/bench.rs", - " - #![feature(test)] - extern crate syntax; - extern crate test; - - #[bench] - fn external_bench(_b: &mut test::Bencher) {} - ", - ) - .build(); - - p.cargo("test") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - benchmark target bench.name is required", - ) - .run(); -} - -#[cargo_test] -fn test_without_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - test = false - doctest = false - - [[test]] - path = "src/test.rs" - "#, - ) - .file( - "src/lib.rs", - r#" - pub fn foo() {} - pub fn get_hello() -> &'static str { "Hello" } - "#, - ) - .file( - "src/main.rs", - " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - ", - ) - .file( - "src/test.rs", - r#" - extern crate syntax; - - #[test] - fn external_test() { assert_eq!(syntax::get_hello(), "Hello") } - "#, - ) - .build(); - - p.cargo("test") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - test target test.name is required", - ) - .run(); -} - -#[cargo_test] -fn example_without_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "syntax" - version = "0.0.1" - authors = [] - - [lib] - test = false - doctest = false - - [[example]] - path = "examples/example.rs" - "#, - ) - .file("src/lib.rs", "pub fn foo() {}") - .file( - "src/main.rs", - " - extern crate syntax; - - fn main() {} - - #[test] - fn test() { syntax::foo() } - ", - ) - .file( - "examples/example.rs", - r#" - extern crate syntax; - - fn main() { - println!("example1"); - } - "#, - ) - .build(); - - p.cargo("test") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to parse manifest at `[..]` - -Caused by: - example target example.name is required", - ) - .run(); -} - -#[cargo_test] -fn bin_there_for_integration() { - let p = project() - .file( - "src/main.rs", - " - fn main() { std::process::exit(101); } - #[test] fn main_test() {} - ", - ) - .file( - "tests/foo.rs", - r#" - use std::process::Command; - #[test] - fn test_test() { - let status = Command::new("target/debug/foo").status().unwrap(); - assert_eq!(status.code(), Some(101)); - } - "#, - ) - .build(); - - p.cargo("test -v") - .with_stdout_contains("test main_test ... ok") - .with_stdout_contains("test test_test ... 
ok") - .run(); -} - -#[cargo_test] -fn test_dylib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate_type = ["dylib"] - - [dependencies.bar] - path = "bar" - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate bar as the_bar; - - pub fn bar() { the_bar::baz(); } - - #[test] - fn foo() { bar(); } - "#, - ) - .file( - "tests/test.rs", - r#" - extern crate foo as the_foo; - - #[test] - fn foo() { the_foo::bar(); } - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [lib] - name = "bar" - crate_type = ["dylib"] - "#, - ) - .file("bar/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] bar v0.0.1 ([CWD]/bar) -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[RUNNING] [..] (target/debug/deps/test-[..][EXE])", - ) - .with_stdout_contains_n("test foo ... ok", 2) - .run(); - - p.root().move_into_the_past(); - p.cargo("test") - .with_stderr( - "\ -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[RUNNING] [..] (target/debug/deps/test-[..][EXE])", - ) - .with_stdout_contains_n("test foo ... ok", 2) - .run(); -} - -#[cargo_test] -fn test_twice_with_build_cmd() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "#[test] fn foo() {}") - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[DOCTEST] foo", - ) - .with_stdout_contains("test foo ... ok") - .with_stdout_contains("running 0 tests") - .run(); - - p.cargo("test") - .with_stderr( - "\ -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[DOCTEST] foo", - ) - .with_stdout_contains("test foo ... ok") - .with_stdout_contains("running 0 tests") - .run(); -} - -#[cargo_test] -fn test_then_build() { - let p = project().file("src/lib.rs", "#[test] fn foo() {}").build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[DOCTEST] foo", - ) - .with_stdout_contains("test foo ... ok") - .with_stdout_contains("running 0 tests") - .run(); - - p.cargo("build").with_stdout("").run(); -} - -#[cargo_test] -fn test_no_run() { - let p = project() - .file("src/lib.rs", "#[test] fn foo() { panic!() }") - .build(); - - p.cargo("test --no-run") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn test_run_specific_bin_target() { - let prj = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name="bin1" - path="src/bin1.rs" - - [[bin]] - name="bin2" - path="src/bin2.rs" - "#, - ) - .file("src/bin1.rs", "#[test] fn test1() { }") - .file("src/bin2.rs", "#[test] fn test2() { }") - .build(); - - prj.cargo("test --bin bin2") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] 
(target/debug/deps/bin2-[..][EXE])", - ) - .with_stdout_contains("test test2 ... ok") - .run(); -} - -#[cargo_test] -fn test_run_implicit_bin_target() { - let prj = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name="mybin" - path="src/mybin.rs" - "#, - ) - .file( - "src/mybin.rs", - "#[test] fn test_in_bin() { } - fn main() { panic!(\"Don't execute me!\"); }", - ) - .file("tests/mytest.rs", "#[test] fn test_in_test() { }") - .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") - .file( - "examples/myexm.rs", - "#[test] fn test_in_exm() { } - fn main() { panic!(\"Don't execute me!\"); }", - ) - .build(); - - prj.cargo("test --bins") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/mybin-[..][EXE])", - ) - .with_stdout_contains("test test_in_bin ... ok") - .run(); -} - -#[cargo_test] -fn test_run_specific_test_target() { - let prj = project() - .file("src/bin/a.rs", "fn main() { }") - .file("src/bin/b.rs", "#[test] fn test_b() { } fn main() { }") - .file("tests/a.rs", "#[test] fn test_a() { }") - .file("tests/b.rs", "#[test] fn test_b() { }") - .build(); - - prj.cargo("test --test b") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/b-[..][EXE])", - ) - .with_stdout_contains("test test_b ... ok") - .run(); -} - -#[cargo_test] -fn test_run_implicit_test_target() { - let prj = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name="mybin" - path="src/mybin.rs" - "#, - ) - .file( - "src/mybin.rs", - "#[test] fn test_in_bin() { } - fn main() { panic!(\"Don't execute me!\"); }", - ) - .file("tests/mytest.rs", "#[test] fn test_in_test() { }") - .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") - .file( - "examples/myexm.rs", - "fn main() { compile_error!(\"Don't build me!\"); }", - ) - .build(); - - prj.cargo("test --tests") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/mybin-[..][EXE]) -[RUNNING] [..] (target/debug/deps/mytest-[..][EXE])", - ) - .with_stdout_contains("test test_in_test ... ok") - .run(); -} - -#[cargo_test] -fn test_run_implicit_bench_target() { - let prj = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name="mybin" - path="src/mybin.rs" - "#, - ) - .file( - "src/mybin.rs", - "#[test] fn test_in_bin() { } - fn main() { panic!(\"Don't execute me!\"); }", - ) - .file("tests/mytest.rs", "#[test] fn test_in_test() { }") - .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") - .file( - "examples/myexm.rs", - "fn main() { compile_error!(\"Don't build me!\"); }", - ) - .build(); - - prj.cargo("test --benches") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/mybin-[..][EXE]) -[RUNNING] [..] (target/debug/deps/mybench-[..][EXE])", - ) - .with_stdout_contains("test test_in_bench ... 
ok") - .run(); -} - -#[cargo_test] -fn test_run_implicit_example_target() { - let prj = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name = "mybin" - path = "src/mybin.rs" - - [[example]] - name = "myexm1" - - [[example]] - name = "myexm2" - test = true - "#, - ) - .file( - "src/mybin.rs", - "#[test] fn test_in_bin() { } - fn main() { panic!(\"Don't execute me!\"); }", - ) - .file("tests/mytest.rs", "#[test] fn test_in_test() { }") - .file("benches/mybench.rs", "#[test] fn test_in_bench() { }") - .file( - "examples/myexm1.rs", - "#[test] fn test_in_exm() { } - fn main() { panic!(\"Don't execute me!\"); }", - ) - .file( - "examples/myexm2.rs", - "#[test] fn test_in_exm() { } - fn main() { panic!(\"Don't execute me!\"); }", - ) - .build(); - - // Compiles myexm1 as normal, but does not run it. - prj.cargo("test -v") - .with_stderr_contains("[RUNNING] `rustc [..]myexm1.rs [..]--crate-type bin[..]") - .with_stderr_contains("[RUNNING] `rustc [..]myexm2.rs [..]--test[..]") - .with_stderr_does_not_contain("[RUNNING] [..]myexm1-[..]") - .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") - .run(); - - // Only tests myexm2. - prj.cargo("test --tests") - .with_stderr_does_not_contain("[RUNNING] [..]myexm1-[..]") - .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") - .run(); - - // Tests all examples. - prj.cargo("test --examples") - .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]") - .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") - .run(); - - // Test an example, even without `test` set. - prj.cargo("test --example myexm1") - .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]") - .run(); - - // Tests all examples. - prj.cargo("test --all-targets") - .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm1-[..]") - .with_stderr_contains("[RUNNING] [..]target/debug/examples/myexm2-[..]") - .run(); -} - -#[cargo_test] -fn test_filtered_excludes_compiling_examples() { - let p = project() - .file( - "src/lib.rs", - "#[cfg(test)] mod tests { #[test] fn foo() { assert!(true); } }", - ) - .file("examples/ex1.rs", "fn main() {}") - .build(); - - p.cargo("test -v foo") - .with_stdout( - " -running 1 test -test tests::foo ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] - -", - ) - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc --crate-name foo src/lib.rs [..] --test [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `[CWD]/target/debug/deps/foo-[..] foo` -", - ) - .with_stderr_does_not_contain("[RUNNING][..]rustc[..]ex1[..]") - .run(); -} - -#[cargo_test] -fn test_no_harness() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [[bin]] - name = "foo" - test = false - - [[test]] - name = "bar" - path = "foo.rs" - harness = false - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("foo.rs", "fn main() {}") - .build(); - - p.cargo("test -- --nocapture") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] 
(target/debug/deps/bar-[..][EXE]) -", - ) - .run(); -} - -#[cargo_test] -fn selective_testing() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.d1] - path = "d1" - [dependencies.d2] - path = "d2" - - [lib] - name = "foo" - doctest = false - "#, - ) - .file("src/lib.rs", "") - .file( - "d1/Cargo.toml", - r#" - [package] - name = "d1" - version = "0.0.1" - authors = [] - - [lib] - name = "d1" - doctest = false - "#, - ) - .file("d1/src/lib.rs", "") - .file( - "d1/src/main.rs", - "#[allow(unused_extern_crates)] extern crate d1; fn main() {}", - ) - .file( - "d2/Cargo.toml", - r#" - [package] - name = "d2" - version = "0.0.1" - authors = [] - - [lib] - name = "d2" - doctest = false - "#, - ) - .file("d2/src/lib.rs", "") - .file( - "d2/src/main.rs", - "#[allow(unused_extern_crates)] extern crate d2; fn main() {}", - ); - let p = p.build(); - - println!("d1"); - p.cargo("test -p d1") - .with_stderr( - "\ -[COMPILING] d1 v0.0.1 ([CWD]/d1) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/d1-[..][EXE]) -[RUNNING] [..] (target/debug/deps/d1-[..][EXE])", - ) - .with_stdout_contains_n("running 0 tests", 2) - .run(); - - println!("d2"); - p.cargo("test -p d2") - .with_stderr( - "\ -[COMPILING] d2 v0.0.1 ([CWD]/d2) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/d2-[..][EXE]) -[RUNNING] [..] (target/debug/deps/d2-[..][EXE])", - ) - .with_stdout_contains_n("running 0 tests", 2) - .run(); - - println!("whole"); - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE])", - ) - .with_stdout_contains("running 0 tests") - .run(); -} - -#[cargo_test] -fn almost_cyclic_but_not_quite() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies.b] - path = "b" - [dev-dependencies.c] - path = "c" - "#, - ) - .file( - "src/lib.rs", - r#" - #[cfg(test)] extern crate b; - #[cfg(test)] extern crate c; - "#, - ) - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.0.1" - authors = [] - - [dependencies.foo] - path = ".." 
- "#, - ) - .file( - "b/src/lib.rs", - r#" - #[allow(unused_extern_crates)] - extern crate foo; - "#, - ) - .file("c/Cargo.toml", &basic_manifest("c", "0.0.1")) - .file("c/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.cargo("test").run(); -} - -#[cargo_test] -fn build_then_selective_test() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.b] - path = "b" - "#, - ) - .file( - "src/lib.rs", - "#[allow(unused_extern_crates)] extern crate b;", - ) - .file( - "src/main.rs", - r#" - #[allow(unused_extern_crates)] - extern crate b; - #[allow(unused_extern_crates)] - extern crate foo; - fn main() {} - "#, - ) - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - p.root().move_into_the_past(); - p.cargo("test -p b").run(); -} - -#[cargo_test] -fn example_dev_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies.bar] - path = "bar" - "#, - ) - .file("src/lib.rs", "") - .file("examples/e1.rs", "extern crate bar; fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file( - "bar/src/lib.rs", - r#" - // make sure this file takes awhile to compile - macro_rules! f0( () => (1) ); - macro_rules! f1( () => ({(f0!()) + (f0!())}) ); - macro_rules! f2( () => ({(f1!()) + (f1!())}) ); - macro_rules! f3( () => ({(f2!()) + (f2!())}) ); - macro_rules! f4( () => ({(f3!()) + (f3!())}) ); - macro_rules! f5( () => ({(f4!()) + (f4!())}) ); - macro_rules! f6( () => ({(f5!()) + (f5!())}) ); - macro_rules! f7( () => ({(f6!()) + (f6!())}) ); - macro_rules! f8( () => ({(f7!()) + (f7!())}) ); - pub fn bar() { - f8!(); - } - "#, - ) - .build(); - p.cargo("test").run(); - p.cargo("run --example e1 --release -v").run(); -} - -#[cargo_test] -fn selective_testing_with_docs() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.d1] - path = "d1" - "#, - ) - .file( - "src/lib.rs", - r#" - /// ``` - /// not valid rust - /// ``` - pub fn foo() {} - "#, - ) - .file( - "d1/Cargo.toml", - r#" - [package] - name = "d1" - version = "0.0.1" - authors = [] - - [lib] - name = "d1" - path = "d1.rs" - "#, - ) - .file("d1/d1.rs", ""); - let p = p.build(); - - p.cargo("test -p d1") - .with_stderr( - "\ -[COMPILING] d1 v0.0.1 ([CWD]/d1) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/d1[..][EXE]) -[DOCTEST] d1", - ) - .with_stdout_contains_n("running 0 tests", 2) - .run(); -} - -#[cargo_test] -fn example_bin_same_name() { - let p = project() - .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) - .file("examples/foo.rs", r#"fn main() { println!("example"); }"#) - .build(); - - p.cargo("test --no-run -v") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc [..]` -[RUNNING] `rustc [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - assert!(!p.bin("foo").is_file()); - assert!(p.bin("examples/foo").is_file()); - - p.process(&p.bin("examples/foo")) - .with_stdout("example\n") - .run(); - - p.cargo("run") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-[RUNNING] [..]", - ) - .with_stdout("bin") - .run(); - assert!(p.bin("foo").is_file()); -} - -#[cargo_test] -fn test_with_example_twice() { - let p = project() - .file("src/bin/foo.rs", r#"fn main() { println!("bin"); }"#) - .file("examples/foo.rs", r#"fn main() { println!("example"); }"#) - .build(); - - println!("first"); - p.cargo("test -v").run(); - assert!(p.bin("examples/foo").is_file()); - println!("second"); - p.cargo("test -v").run(); - assert!(p.bin("examples/foo").is_file()); -} - -#[cargo_test] -fn example_with_dev_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - test = false - doctest = false - - [dev-dependencies.a] - path = "a" - "#, - ) - .file("src/lib.rs", "") - .file( - "examples/ex.rs", - "#[allow(unused_extern_crates)] extern crate a; fn main() {}", - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("test -v") - .with_stderr( - "\ -[..] -[..] -[..] -[..] -[RUNNING] `rustc --crate-name ex [..] --extern a=[..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn bin_is_preserved() { - let p = project() - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -v").run(); - assert!(p.bin("foo").is_file()); - - println!("test"); - p.cargo("test -v").run(); - assert!(p.bin("foo").is_file()); -} - -#[cargo_test] -fn bad_example() { - let p = project().file("src/lib.rs", ""); - let p = p.build(); - - p.cargo("run --example foo") - .with_status(101) - .with_stderr("[ERROR] no example target named `foo`") - .run(); - p.cargo("run --bin foo") - .with_status(101) - .with_stderr("[ERROR] no bin target named `foo`") - .run(); -} - -#[cargo_test] -fn doctest_feature() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - [features] - bar = [] - "#, - ) - .file( - "src/lib.rs", - r#" - /// ```rust - /// assert_eq!(foo::foo(), 1); - /// ``` - #[cfg(feature = "bar")] - pub fn foo() -> i32 { 1 } - "#, - ) - .build(); - - p.cargo("test --features bar") - .with_stderr( - "\ -[COMPILING] foo [..] -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo[..][EXE]) -[DOCTEST] foo", - ) - .with_stdout_contains("running 0 tests") - .with_stdout_contains("test [..] ... 
ok") - .run(); -} - -#[cargo_test] -fn dashes_to_underscores() { - let p = project() - .file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1")) - .file( - "src/lib.rs", - r#" - /// ``` - /// assert_eq!(foo_bar::foo(), 1); - /// ``` - pub fn foo() -> i32 { 1 } - "#, - ) - .build(); - - p.cargo("test -v").run(); -} - -#[cargo_test] -fn doctest_dev_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies] - b = { path = "b" } - "#, - ) - .file( - "src/lib.rs", - r#" - /// ``` - /// extern crate b; - /// ``` - pub fn foo() {} - "#, - ) - .file("b/Cargo.toml", &basic_manifest("b", "0.0.1")) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("test -v").run(); -} - -#[cargo_test] -fn filter_no_doc_tests() { - let p = project() - .file( - "src/lib.rs", - r#" - /// ``` - /// extern crate b; - /// ``` - pub fn foo() {} - "#, - ) - .file("tests/foo.rs", "") - .build(); - - p.cargo("test --test=foo") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo[..][EXE])", - ) - .with_stdout_contains("running 0 tests") - .run(); -} - -#[cargo_test] -fn dylib_doctest() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate-type = ["rlib", "dylib"] - test = false - "#, - ) - .file( - "src/lib.rs", - r#" - /// ``` - /// foo::foo(); - /// ``` - pub fn foo() {} - "#, - ) - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[DOCTEST] foo", - ) - .with_stdout_contains("test [..] ... ok") - .run(); -} - -#[cargo_test] -fn dylib_doctest2() { - // Can't doc-test dylibs, as they're statically linked together. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - name = "foo" - crate-type = ["dylib"] - test = false - "#, - ) - .file( - "src/lib.rs", - r#" - /// ``` - /// foo::foo(); - /// ``` - pub fn foo() {} - "#, - ) - .build(); - - p.cargo("test").with_stdout("").run(); -} - -#[cargo_test] -fn cyclic_dev_dep_doc_test() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies] - bar = { path = "bar" } - "#, - ) - .file( - "src/lib.rs", - r#" - //! ``` - //! extern crate bar; - //! ``` - "#, - ) - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - foo = { path = ".." } - "#, - ) - .file( - "bar/src/lib.rs", - r#" - #[allow(unused_extern_crates)] - extern crate foo; - "#, - ) - .build(); - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[COMPILING] bar v0.0.1 ([..]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo[..][EXE]) -[DOCTEST] foo", - ) - .with_stdout_contains("running 0 tests") - .with_stdout_contains("test [..] ... 
ok") - .run(); -} - -#[cargo_test] -fn dev_dep_with_build_script() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dev-dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file("examples/foo.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - build = "build.rs" - "#, - ) - .file("bar/src/lib.rs", "") - .file("bar/build.rs", "fn main() {}") - .build(); - p.cargo("test").run(); -} - -#[cargo_test] -fn no_fail_fast() { - let p = project() - .file( - "src/lib.rs", - r#" - pub fn add_one(x: i32) -> i32{ - x + 1 - } - - /// ```rust - /// use foo::sub_one; - /// assert_eq!(sub_one(101), 100); - /// ``` - pub fn sub_one(x: i32) -> i32{ - x - 1 - } - "#, - ) - .file( - "tests/test_add_one.rs", - r#" - extern crate foo; - use foo::*; - - #[test] - fn add_one_test() { - assert_eq!(add_one(1), 2); - } - - #[test] - fn fail_add_one_test() { - assert_eq!(add_one(1), 1); - } - "#, - ) - .file( - "tests/test_sub_one.rs", - r#" - extern crate foo; - use foo::*; - - #[test] - fn sub_one_test() { - assert_eq!(sub_one(1), 0); - } - "#, - ) - .build(); - p.cargo("test --no-fail-fast") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..][EXE]) -[RUNNING] [..] (target/debug/deps/test_add_one-[..][EXE])", - ) - .with_stdout_contains("running 0 tests") - .with_stderr_contains( - "\ -[RUNNING] [..] (target/debug/deps/test_sub_one-[..][EXE]) -[DOCTEST] foo", - ) - .with_stdout_contains("test result: FAILED. [..]") - .with_stdout_contains("test sub_one_test ... ok") - .with_stdout_contains_n("test [..] ... ok", 3) - .run(); -} - -#[cargo_test] -fn test_multiple_packages() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies.d1] - path = "d1" - [dependencies.d2] - path = "d2" - - [lib] - name = "foo" - doctest = false - "#, - ) - .file("src/lib.rs", "") - .file( - "d1/Cargo.toml", - r#" - [package] - name = "d1" - version = "0.0.1" - authors = [] - - [lib] - name = "d1" - doctest = false - "#, - ) - .file("d1/src/lib.rs", "") - .file( - "d2/Cargo.toml", - r#" - [package] - name = "d2" - version = "0.0.1" - authors = [] - - [lib] - name = "d2" - doctest = false - "#, - ) - .file("d2/src/lib.rs", ""); - let p = p.build(); - - p.cargo("test -p d1 -p d2") - .with_stderr_contains("[RUNNING] [..] (target/debug/deps/d1-[..][EXE])") - .with_stderr_contains("[RUNNING] [..] (target/debug/deps/d2-[..][EXE])") - .with_stdout_contains_n("running 0 tests", 2) - .run(); -} - -#[cargo_test] -fn bin_does_not_rebuild_tests() { - let p = project() - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .file("tests/foo.rs", ""); - let p = p.build(); - - p.cargo("test -v").run(); - - sleep_ms(1000); - fs::write(p.root().join("src/main.rs"), "fn main() { 3; }").unwrap(); - - p.cargo("test -v --no-run") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..] src/main.rs [..]` -[RUNNING] `rustc [..] src/main.rs [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn selective_test_wonky_profile() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.release] - opt-level = 2 - - [dependencies] - a = { path = "a" } - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", ""); - let p = p.build(); - - p.cargo("test -v --no-run --release -p foo -p a").run(); -} - -#[cargo_test] -fn selective_test_optional_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a", optional = true } - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", ""); - let p = p.build(); - - p.cargo("test -v --no-run --features a -p a") - .with_stderr( - "\ -[COMPILING] a v0.0.1 ([..]) -[RUNNING] `rustc [..] a/src/lib.rs [..]` -[RUNNING] `rustc [..] a/src/lib.rs [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn only_test_docs() { - let p = project() - .file( - "src/lib.rs", - r#" - #[test] - fn foo() { - let a: u32 = "hello"; - } - - /// ``` - /// foo::bar(); - /// println!("ok"); - /// ``` - pub fn bar() { - } - "#, - ) - .file("tests/foo.rs", "this is not rust"); - let p = p.build(); - - p.cargo("test --doc") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[DOCTEST] foo", - ) - .with_stdout_contains("test [..] ... ok") - .run(); -} - -#[cargo_test] -fn test_panic_abort_with_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = { path = "bar" } - - [profile.dev] - panic = 'abort' - "#, - ) - .file( - "src/lib.rs", - r#" - extern crate bar; - - #[test] - fn foo() {} - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1")) - .file("bar/src/lib.rs", "") - .build(); - p.cargo("test -v").run(); -} - -#[cargo_test] -fn cfg_test_even_with_no_harness() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [lib] - harness = false - doctest = false - "#, - ) - .file( - "src/lib.rs", - r#"#[cfg(test)] fn main() { println!("hello!"); }"#, - ) - .build(); - p.cargo("test -v") - .with_stdout("hello!\n") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([..]) -[RUNNING] `rustc [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] 
-[RUNNING] `[..]` -", - ) - .run(); -} - -#[cargo_test] -fn panic_abort_multiple() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a" } - - [profile.release] - panic = 'abort' - "#, - ) - .file( - "src/lib.rs", - "#[allow(unused_extern_crates)] extern crate a;", - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", "") - .build(); - p.cargo("test --release -v -p foo -p a").run(); -} - -#[cargo_test] -fn pass_correct_cfgs_flags_to_rustdoc() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - authors = [] - - [features] - default = ["feature_a/default"] - nightly = ["feature_a/nightly"] - - [dependencies.feature_a] - path = "libs/feature_a" - default-features = false - "#, - ) - .file( - "src/lib.rs", - r#" - #[cfg(test)] - mod tests { - #[test] - fn it_works() { - assert!(true); - } - } - "#, - ) - .file( - "libs/feature_a/Cargo.toml", - r#" - [package] - name = "feature_a" - version = "0.1.0" - authors = [] - - [features] - default = ["mock_serde_codegen"] - nightly = ["mock_serde_derive"] - - [dependencies] - mock_serde_derive = { path = "../mock_serde_derive", optional = true } - - [build-dependencies] - mock_serde_codegen = { path = "../mock_serde_codegen", optional = true } - "#, - ) - .file( - "libs/feature_a/src/lib.rs", - r#" - #[cfg(feature = "mock_serde_derive")] - const MSG: &'static str = "This is safe"; - - #[cfg(feature = "mock_serde_codegen")] - const MSG: &'static str = "This is risky"; - - pub fn get() -> &'static str { - MSG - } - "#, - ) - .file( - "libs/mock_serde_derive/Cargo.toml", - &basic_manifest("mock_serde_derive", "0.1.0"), - ) - .file("libs/mock_serde_derive/src/lib.rs", "") - .file( - "libs/mock_serde_codegen/Cargo.toml", - &basic_manifest("mock_serde_codegen", "0.1.0"), - ) - .file("libs/mock_serde_codegen/src/lib.rs", ""); - let p = p.build(); - - p.cargo("test --package feature_a --verbose") - .with_stderr_contains( - "\ -[DOCTEST] feature_a -[RUNNING] `rustdoc [..]--test [..]mock_serde_codegen[..]`", - ) - .run(); - - p.cargo("test --verbose") - .with_stderr_contains( - "\ -[DOCTEST] foo -[RUNNING] `rustdoc [..]--test [..]feature_a[..]`", - ) - .run(); -} - -#[cargo_test] -fn test_release_ignore_panic() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a" } - - [profile.test] - panic = 'abort' - [profile.release] - panic = 'abort' - "#, - ) - .file( - "src/lib.rs", - "#[allow(unused_extern_crates)] extern crate a;", - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", ""); - let p = p.build(); - println!("test"); - p.cargo("test -v").run(); - println!("bench"); - p.cargo("bench -v").run(); -} - -#[cargo_test] -fn test_many_with_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - a = { path = "a" } - - [features] - foo = [] - - [workspace] - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.0.1")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("test -v -p a -p foo --features foo").run(); -} - -#[cargo_test] -fn test_all_workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { path = "bar" } - - [workspace] - "#, - ) - .file("src/main.rs", 
"#[test] fn foo_test() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "#[test] fn bar_test() {}") - .build(); - - p.cargo("test --workspace") - .with_stdout_contains("test foo_test ... ok") - .with_stdout_contains("test bar_test ... ok") - .run(); -} - -#[cargo_test] -fn test_all_exclude() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "#[test] pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "#[test] pub fn baz() { assert!(false); }") - .build(); - - p.cargo("test --workspace --exclude baz") - .with_stdout_contains( - "running 1 test -test bar ... ok", - ) - .run(); -} - -#[cargo_test] -fn test_all_exclude_not_found() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "#[test] pub fn bar() {}") - .build(); - - p.cargo("test --workspace --exclude baz") - .with_stderr_contains("[WARNING] excluded package(s) `baz` not found in workspace [..]") - .with_stdout_contains( - "running 1 test -test bar ... ok", - ) - .run(); -} - -#[cargo_test] -fn test_all_exclude_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "#[test] pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "#[test] pub fn baz() { assert!(false); }") - .build(); - - p.cargo("test --workspace --exclude '*z'") - .with_stdout_contains( - "running 1 test -test bar ... ok", - ) - .run(); -} - -#[cargo_test] -fn test_all_exclude_glob_not_found() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "#[test] pub fn bar() {}") - .build(); - - p.cargo("test --workspace --exclude '*z'") - .with_stderr_contains( - "[WARNING] excluded package pattern(s) `*z` not found in workspace [..]", - ) - .with_stdout_contains( - "running 1 test -test bar ... ok", - ) - .run(); -} - -#[cargo_test] -fn test_all_exclude_broken_glob() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - p.cargo("test --workspace --exclude '[*z'") - .with_status(101) - .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`") - .run(); -} - -#[cargo_test] -fn test_all_virtual_manifest() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) - .file("a/src/lib.rs", "#[test] fn a() {}") - .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) - .file("b/src/lib.rs", "#[test] fn b() {}") - .build(); - - p.cargo("test --workspace") - .with_stdout_contains("running 1 test\ntest a ... ok") - .with_stdout_contains("running 1 test\ntest b ... 
ok") - .run(); -} - -#[cargo_test] -fn test_virtual_manifest_all_implied() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) - .file("a/src/lib.rs", "#[test] fn a() {}") - .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) - .file("b/src/lib.rs", "#[test] fn b() {}") - .build(); - - p.cargo("test") - .with_stdout_contains("running 1 test\ntest a ... ok") - .with_stdout_contains("running 1 test\ntest b ... ok") - .run(); -} - -#[cargo_test] -fn test_virtual_manifest_one_project() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "#[test] fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "#[test] fn baz() { assert!(false); }") - .build(); - - p.cargo("test -p bar") - .with_stdout_contains("running 1 test\ntest bar ... ok") - .with_stdout_does_not_contain("running 1 test\ntest baz ... ok") - .run(); -} - -#[cargo_test] -fn test_virtual_manifest_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "#[test] fn bar() { assert!(false); }") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "#[test] fn baz() {}") - .build(); - - p.cargo("test -p '*z'") - .with_stdout_does_not_contain("running 1 test\ntest bar ... ok") - .with_stdout_contains("running 1 test\ntest baz ... ok") - .run(); -} - -#[cargo_test] -fn test_virtual_manifest_glob_not_found() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "#[test] fn bar() {}") - .build(); - - p.cargo("test -p bar -p '*z'") - .with_status(101) - .with_stderr("[ERROR] package pattern(s) `*z` not found in workspace [..]") - .run(); -} - -#[cargo_test] -fn test_virtual_manifest_broken_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "#[test] fn bar() {}") - .build(); - - p.cargo("test -p '[*z'") - .with_status(101) - .with_stderr_contains("[ERROR] cannot build glob pattern from `[*z`") - .run(); -} - -#[cargo_test] -fn test_all_member_dependency_same_name() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [project] - name = "a" - version = "0.1.0" - - [dependencies] - a = "0.1.0" - "#, - ) - .file("a/src/lib.rs", "#[test] fn a() {}") - .build(); - - Package::new("a", "0.1.0").publish(); - - p.cargo("test --workspace") - .with_stdout_contains("test a ... 
ok") - .run(); -} - -#[cargo_test] -fn doctest_only_with_dev_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "a" - version = "0.1.0" - - [dev-dependencies] - b = { path = "b" } - "#, - ) - .file( - "src/lib.rs", - r#" - /// ``` - /// extern crate b; - /// - /// b::b(); - /// ``` - pub fn a() {} - "#, - ) - .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) - .file("b/src/lib.rs", "pub fn b() {}") - .build(); - - p.cargo("test --doc -v").run(); -} - -#[cargo_test] -fn test_many_targets() { - let p = project() - .file( - "src/bin/a.rs", - r#" - fn main() {} - #[test] fn bin_a() {} - "#, - ) - .file( - "src/bin/b.rs", - r#" - fn main() {} - #[test] fn bin_b() {} - "#, - ) - .file( - "src/bin/c.rs", - r#" - fn main() {} - #[test] fn bin_c() { panic!(); } - "#, - ) - .file( - "examples/a.rs", - r#" - fn main() {} - #[test] fn example_a() {} - "#, - ) - .file( - "examples/b.rs", - r#" - fn main() {} - #[test] fn example_b() {} - "#, - ) - .file("examples/c.rs", "#[test] fn example_c() { panic!(); }") - .file("tests/a.rs", "#[test] fn test_a() {}") - .file("tests/b.rs", "#[test] fn test_b() {}") - .file("tests/c.rs", "does not compile") - .build(); - - p.cargo("test --verbose --bin a --bin b --example a --example b --test a --test b") - .with_stdout_contains("test bin_a ... ok") - .with_stdout_contains("test bin_b ... ok") - .with_stdout_contains("test test_a ... ok") - .with_stdout_contains("test test_b ... ok") - .with_stderr_contains("[RUNNING] `rustc --crate-name a examples/a.rs [..]`") - .with_stderr_contains("[RUNNING] `rustc --crate-name b examples/b.rs [..]`") - .run(); -} - -#[cargo_test] -fn doctest_and_registry() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "a" - version = "0.1.0" - - [dependencies] - b = { path = "b" } - c = { path = "c" } - - [workspace] - "#, - ) - .file("src/lib.rs", "") - .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) - .file( - "b/src/lib.rs", - " - /// ``` - /// b::foo(); - /// ``` - pub fn foo() {} - ", - ) - .file( - "c/Cargo.toml", - r#" - [project] - name = "c" - version = "0.1.0" - - [dependencies] - b = "0.1" - "#, - ) - .file("c/src/lib.rs", "") - .build(); - - Package::new("b", "0.1.0").publish(); - - p.cargo("test --workspace -v").run(); -} - -#[cargo_test] -fn cargo_test_env() { - let src = format!( - r#" - #![crate_type = "rlib"] - - #[test] - fn env_test() {{ - use std::env; - eprintln!("{{}}", env::var("{}").unwrap()); - }} - "#, - cargo::CARGO_ENV - ); - - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", &src) - .build(); - - let cargo = cargo_exe().canonicalize().unwrap(); - p.cargo("test --lib -- --nocapture") - .with_stderr_contains(cargo.to_str().unwrap()) - .with_stdout_contains("test env_test ... ok") - .run(); -} - -#[cargo_test] -fn test_order() { - let p = project() - .file("src/lib.rs", "#[test] fn test_lib() {}") - .file("tests/a.rs", "#[test] fn test_a() {}") - .file("tests/z.rs", "#[test] fn test_z() {}") - .build(); - - p.cargo("test --workspace") - .with_stdout_contains( - " -running 1 test -test test_lib ... ok - -test result: ok. [..] - - -running 1 test -test test_a ... ok - -test result: ok. [..] - - -running 1 test -test test_z ... ok - -test result: ok. [..] -", - ) - .run(); -} - -#[cargo_test] -fn cyclic_dev() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dev-dependencies] - foo = { path = "." 
} - "#, - ) - .file("src/lib.rs", "#[test] fn test_lib() {}") - .file("tests/foo.rs", "extern crate foo;") - .build(); - - p.cargo("test --workspace").run(); -} - -#[cargo_test] -fn publish_a_crate_without_tests() { - Package::new("testless", "0.1.0") - .file( - "Cargo.toml", - r#" - [project] - name = "testless" - version = "0.1.0" - exclude = ["tests/*"] - - [[test]] - name = "a_test" - "#, - ) - .file("src/lib.rs", "") - // In real life, the package will have a test, - // which would be excluded from .crate file by the - // `exclude` field. Our test harness does not honor - // exclude though, so let's just not add the file! - // .file("tests/a_test.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [dependencies] - testless = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("test").run(); - p.cargo("test --package testless").run(); -} - -#[cargo_test] -fn find_dependency_of_proc_macro_dependency_with_target() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["root", "proc_macro_dep"] - "#, - ) - .file( - "root/Cargo.toml", - r#" - [project] - name = "root" - version = "0.1.0" - authors = [] - - [dependencies] - proc_macro_dep = { path = "../proc_macro_dep" } - "#, - ) - .file( - "root/src/lib.rs", - r#" - #[macro_use] - extern crate proc_macro_dep; - - #[derive(Noop)] - pub struct X; - "#, - ) - .file( - "proc_macro_dep/Cargo.toml", - r#" - [project] - name = "proc_macro_dep" - version = "0.1.0" - authors = [] - - [lib] - proc-macro = true - - [dependencies] - baz = "^0.1" - "#, - ) - .file( - "proc_macro_dep/src/lib.rs", - r#" - extern crate baz; - extern crate proc_macro; - use proc_macro::TokenStream; - - #[proc_macro_derive(Noop)] - pub fn noop(_input: TokenStream) -> TokenStream { - "".parse().unwrap() - } - "#, - ) - .build(); - Package::new("bar", "0.1.0").publish(); - Package::new("baz", "0.1.0") - .dep("bar", "0.1") - .file("src/lib.rs", "extern crate bar;") - .publish(); - p.cargo("test --workspace --target").arg(rustc_host()).run(); -} - -#[cargo_test] -fn test_hint_not_masked_by_doctest() { - let p = project() - .file( - "src/lib.rs", - r#" - /// ``` - /// assert_eq!(1, 1); - /// ``` - pub fn this_works() {} - "#, - ) - .file( - "tests/integ.rs", - r#" - #[test] - fn this_fails() { - panic!(); - } - "#, - ) - .build(); - p.cargo("test --no-fail-fast") - .with_status(101) - .with_stdout_contains("test this_fails ... 
FAILED") - .with_stdout_contains("[..]this_works (line [..]ok") - .with_stderr_contains( - "[ERROR] test failed, to rerun pass \ - '--test integ'", - ) - .run(); -} - -#[cargo_test] -fn test_hint_workspace_virtual() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) - .file("a/src/lib.rs", "#[test] fn t1() {}") - .file("b/Cargo.toml", &basic_manifest("b", "0.1.0")) - .file("b/src/lib.rs", "#[test] fn t1() {assert!(false)}") - .build(); - - p.cargo("test") - .with_stderr_contains("[ERROR] test failed, to rerun pass '-p b --lib'") - .with_status(101) - .run(); - p.cargo("test") - .cwd("b") - .with_stderr_contains("[ERROR] test failed, to rerun pass '--lib'") - .with_status(101) - .run(); -} - -#[cargo_test] -fn test_hint_workspace_nonvirtual() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["a"] - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) - .file("a/src/lib.rs", "#[test] fn t1() {assert!(false)}") - .build(); - - p.cargo("test --workspace") - .with_stderr_contains("[ERROR] test failed, to rerun pass '-p a --lib'") - .with_status(101) - .run(); - p.cargo("test -p a") - .with_stderr_contains("[ERROR] test failed, to rerun pass '-p a --lib'") - .with_status(101) - .run(); -} - -#[cargo_test] -fn json_artifact_includes_test_flag() { - // Verify that the JSON artifact output includes `test` flag. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [profile.test] - opt-level = 1 - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("test --lib -v --message-format=json") - .with_json( - r#" - { - "reason":"compiler-artifact", - "profile": { - "debug_assertions": true, - "debuginfo": 2, - "opt_level": "1", - "overflow_checks": true, - "test": true - }, - "executable": "[..]/foo-[..]", - "features": [], - "package_id":"foo 0.0.1 ([..])", - "manifest_path": "[..]", - "target":{ - "kind":["lib"], - "crate_types":["lib"], - "doc": true, - "doctest": true, - "edition": "2015", - "name":"foo", - "src_path":"[..]lib.rs", - "test": true - }, - "filenames":"{...}", - "fresh": false - } - - {"reason": "build-finished", "success": true} - "#, - ) - .run(); -} - -#[cargo_test] -fn json_artifact_includes_executable_for_library_tests() { - let p = project() - .file("src/main.rs", "fn main() { }") - .file("src/lib.rs", r#"#[test] fn lib_test() {}"#) - .build(); - - p.cargo("test --lib -v --no-run --message-format=json") - .with_json( - r#" - { - "executable": "[..]/foo/target/debug/deps/foo-[..][EXE]", - "features": [], - "filenames": "{...}", - "fresh": false, - "package_id": "foo 0.0.1 ([..])", - "manifest_path": "[..]", - "profile": "{...}", - "reason": "compiler-artifact", - "target": { - "crate_types": [ "lib" ], - "kind": [ "lib" ], - "doc": true, - "doctest": true, - "edition": "2015", - "name": "foo", - "src_path": "[..]/foo/src/lib.rs", - "test": true - } - } - - {"reason": "build-finished", "success": true} - "#, - ) - .run(); -} - -#[cargo_test] -fn json_artifact_includes_executable_for_integration_tests() { - let p = project() - .file( - "tests/integration_test.rs", - r#"#[test] fn integration_test() {}"#, - ) - .build(); - - p.cargo("test -v --no-run --message-format=json --test integration_test") - .with_json( - r#" - { - "executable": "[..]/foo/target/debug/deps/integration_test-[..][EXE]", - 
"features": [], - "filenames": "{...}", - "fresh": false, - "package_id": "foo 0.0.1 ([..])", - "manifest_path": "[..]", - "profile": "{...}", - "reason": "compiler-artifact", - "target": { - "crate_types": [ "bin" ], - "kind": [ "test" ], - "doc": false, - "doctest": false, - "edition": "2015", - "name": "integration_test", - "src_path": "[..]/foo/tests/integration_test.rs", - "test": true - } - } - - {"reason": "build-finished", "success": true} - "#, - ) - .run(); -} - -#[cargo_test] -fn test_build_script_links() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - links = 'something' - - [lib] - test = false - "#, - ) - .file("build.rs", "fn main() {}") - .file("src/lib.rs", "") - .build(); - - p.cargo("test --no-run").run(); -} - -#[cargo_test] -fn doctest_skip_staticlib() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [lib] - crate-type = ["staticlib"] - "#, - ) - .file( - "src/lib.rs", - r#" - //! ``` - //! assert_eq!(1,2); - //! ``` - "#, - ) - .build(); - - p.cargo("test --doc") - .with_status(101) - .with_stderr( - "\ -[WARNING] doc tests are not supported for crate type(s) `staticlib` in package `foo` -[ERROR] no library targets found in package `foo`", - ) - .run(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo [..] -[FINISHED] test [..] -[RUNNING] [..] (target/debug/deps/foo-[..])", - ) - .run(); -} - -#[cargo_test] -fn can_not_mix_doc_tests_and_regular_tests() { - let p = project() - .file( - "src/lib.rs", - "\ -/// ``` -/// assert_eq!(1, 1) -/// ``` -pub fn foo() -> u8 { 1 } - -#[cfg(test)] mod tests { - #[test] fn it_works() { assert_eq!(2 + 2, 4); } -} -", - ) - .build(); - - p.cargo("test") - .with_stderr( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..]) -[DOCTEST] foo -", - ) - .with_stdout( - " -running 1 test -test tests::it_works ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] - - -running 1 test -test src/lib.rs - foo (line 1) ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] -\n", - ) - .run(); - - p.cargo("test --lib") - .with_stderr( - "\ -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] [..] (target/debug/deps/foo-[..])\n", - ) - .with_stdout( - " -running 1 test -test tests::it_works ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] -\n", - ) - .run(); - - // This has been modified to attempt to diagnose spurious errors on CI. - // For some reason, this is recompiling the lib when it shouldn't. If the - // root cause is ever found, the changes here should be reverted. - // See https://github.com/rust-lang/cargo/issues/6887 - p.cargo("test --doc -vv") - .with_stderr_does_not_contain("[COMPILING] foo [..]") - .with_stderr_contains("[DOCTEST] foo") - .with_stdout( - " -running 1 test -test src/lib.rs - foo (line 1) ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..] 
- -", - ) - .env("CARGO_LOG", "cargo=trace") - .run(); - - p.cargo("test --lib --doc") - .with_status(101) - .with_stderr("[ERROR] Can't mix --doc with other target selecting options\n") - .run(); -} - -#[cargo_test] -fn can_not_no_run_doc_tests() { - let p = project() - .file( - "src/lib.rs", - r#" - /// ``` - /// let _x = 1 + "foo"; - /// ``` - pub fn foo() -> u8 { 1 } - "#, - ) - .build(); - - p.cargo("test --doc --no-run") - .with_status(101) - .with_stderr("[ERROR] Can't skip running doc tests with --no-run") - .run(); -} - -#[cargo_test] -fn test_all_targets_lib() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("test --all-targets") - .with_stderr( - "\ -[COMPILING] foo [..] -[FINISHED] test [..] -[RUNNING] [..]foo[..] -", - ) - .run(); -} - -#[cargo_test] -fn test_dep_with_dev() { - Package::new("devdep", "0.1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - - [dev-dependencies] - devdep = "0.1" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("test -p bar") - .with_status(101) - .with_stderr( - "[ERROR] package `bar` cannot be tested because it requires dev-dependencies \ - and is not a member of the workspace", - ) - .run(); -} - -#[cargo_test] -fn cargo_test_doctest_xcompile_ignores() { - if !is_nightly() { - // -Zdoctest-xcompile is unstable - return; - } - // -Zdoctest-xcompile also enables --enable-per-target-ignores which - // allows the ignore-TARGET syntax. - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file( - "src/lib.rs", - r#" - ///```ignore-x86_64 - ///assert!(cfg!(not(target_arch = "x86_64"))); - ///``` - pub fn foo() -> u8 { - 4 - } - "#, - ) - .build(); - - p.cargo("build").run(); - #[cfg(not(target_arch = "x86_64"))] - p.cargo("test") - .with_stdout_contains( - "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]", - ) - .run(); - #[cfg(target_arch = "x86_64")] - p.cargo("test") - .with_status(101) - .with_stdout_contains( - "test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured; 0 filtered out[..]", - ) - .run(); - - #[cfg(not(target_arch = "x86_64"))] - p.cargo("test -Zdoctest-xcompile") - .masquerade_as_nightly_cargo() - .with_stdout_contains( - "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]", - ) - .run(); - - #[cfg(target_arch = "x86_64")] - p.cargo("test -Zdoctest-xcompile") - .masquerade_as_nightly_cargo() - .with_stdout_contains( - "test result: ok. 0 passed; 0 failed; 1 ignored; 0 measured; 0 filtered out[..]", - ) - .run(); -} - -#[cargo_test] -fn cargo_test_doctest_xcompile() { - if !cross_compile::can_run_on_host() { - return; - } - if !is_nightly() { - // -Zdoctest-xcompile is unstable - return; - } - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file( - "src/lib.rs", - r#" - - ///``` - ///assert!(1 == 1); - ///``` - pub fn foo() -> u8 { - 4 - } - "#, - ) - .build(); - - p.cargo("build").run(); - p.cargo(&format!("test --target {}", cross_compile::alternate())) - .with_stdout_contains("running 0 tests") - .run(); - p.cargo(&format!( - "test --target {} -Zdoctest-xcompile", - cross_compile::alternate() - )) - .masquerade_as_nightly_cargo() - .with_stdout_contains( - "test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]", - ) - .run(); -} - -#[cargo_test] -fn cargo_test_doctest_xcompile_runner() { - if !cross_compile::can_run_on_host() { - return; - } - if !is_nightly() { - // -Zdoctest-xcompile is unstable - return; - } - - let runner = project() - .file("Cargo.toml", &basic_bin_manifest("runner")) - .file( - "src/main.rs", - r#" - pub fn main() { - eprintln!("this is a runner"); - let args: Vec = std::env::args().collect(); - std::process::Command::new(&args[1]).spawn(); - } - "#, - ) - .build(); - - runner.cargo("build").run(); - assert!(runner.bin("runner").is_file()); - let runner_path = paths::root().join("runner"); - fs::copy(&runner.bin("runner"), &runner_path).unwrap(); - - let config = paths::root().join(".cargo/config"); - - fs::create_dir_all(config.parent().unwrap()).unwrap(); - // Escape Windows backslashes for TOML config. - let runner_str = runner_path.to_str().unwrap().replace('\\', "\\\\"); - fs::write( - config, - format!( - r#" - [target.'cfg(target_arch = "{}")'] - runner = "{}" - "#, - cross_compile::alternate_arch(), - runner_str - ), - ) - .unwrap(); - - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file( - "src/lib.rs", - &format!( - r#" - ///``` - ///assert!(cfg!(target_arch = "{}")); - ///``` - pub fn foo() -> u8 {{ - 4 - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - p.cargo("build").run(); - p.cargo(&format!("test --target {}", cross_compile::alternate())) - .with_stdout_contains("running 0 tests") - .run(); - p.cargo(&format!( - "test --target {} -Zdoctest-xcompile", - cross_compile::alternate() - )) - .masquerade_as_nightly_cargo() - .with_stdout_contains( - "test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]", - ) - .with_stderr_contains("this is a runner") - .run(); -} - -#[cargo_test] -fn cargo_test_doctest_xcompile_no_runner() { - if !cross_compile::can_run_on_host() { - return; - } - if !is_nightly() { - // -Zdoctest-xcompile is unstable - return; - } - - let p = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file( - "src/lib.rs", - &format!( - r#" - ///``` - ///assert!(cfg!(target_arch = "{}")); - ///``` - pub fn foo() -> u8 {{ - 4 - }} - "#, - cross_compile::alternate_arch() - ), - ) - .build(); - - p.cargo("build").run(); - p.cargo(&format!("test --target {}", cross_compile::alternate())) - .with_stdout_contains("running 0 tests") - .run(); - p.cargo(&format!( - "test --target {} -Zdoctest-xcompile", - cross_compile::alternate() - )) - .masquerade_as_nightly_cargo() - .with_stdout_contains( - "test result: ok. 
1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out[..]", - ) - .run(); -} - -#[cargo_test] -fn panic_abort_tests() { - if !is_nightly() { - // -Zpanic-abort-tests in rustc is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = 'foo' - version = '0.1.0' - - [dependencies] - a = { path = 'a' } - - [profile.dev] - panic = 'abort' - [profile.test] - panic = 'abort' - "#, - ) - .file( - "src/lib.rs", - r#" - #[test] - fn foo() { - a::foo(); - } - "#, - ) - .file("a/Cargo.toml", &basic_lib_manifest("a")) - .file("a/src/lib.rs", "pub fn foo() {}") - .build(); - - p.cargo("test -Z panic-abort-tests -v") - .with_stderr_contains("[..]--crate-name a [..]-C panic=abort[..]") - .with_stderr_contains("[..]--crate-name foo [..]-C panic=abort[..]") - .with_stderr_contains("[..]--crate-name foo [..]-C panic=abort[..]--test[..]") - .masquerade_as_nightly_cargo() - .run(); -} - -#[cargo_test] -fn panic_abort_only_test() { - if !is_nightly() { - // -Zpanic-abort-tests in rustc is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = 'foo' - version = '0.1.0' - - [dependencies] - a = { path = 'a' } - - [profile.test] - panic = 'abort' - "#, - ) - .file( - "src/lib.rs", - r#" - #[test] - fn foo() { - a::foo(); - } - "#, - ) - .file("a/Cargo.toml", &basic_lib_manifest("a")) - .file("a/src/lib.rs", "pub fn foo() {}") - .build(); - - p.cargo("test -Z panic-abort-tests -v") - .with_stderr_contains("warning: `panic` setting is ignored for `test` profile") - .masquerade_as_nightly_cargo() - .run(); -} - -#[cargo_test] -fn panic_abort_test_profile_inherits() { - if !is_nightly() { - // -Zpanic-abort-tests in rustc is unstable - return; - } - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = 'foo' - version = '0.1.0' - - [dependencies] - a = { path = 'a' } - - [profile.dev] - panic = 'abort' - "#, - ) - .file( - "src/lib.rs", - r#" - #[test] - fn foo() { - a::foo(); - } - "#, - ) - .file("a/Cargo.toml", &basic_lib_manifest("a")) - .file("a/src/lib.rs", "pub fn foo() {}") - .build(); - - p.cargo("test -Z panic-abort-tests -v") - .masquerade_as_nightly_cargo() - .with_status(0) - .run(); -} - -#[cargo_test] -fn bin_env_for_test() { - // Test for the `CARGO_BIN_` environment variables for tests. - // - // Note: The Unicode binary uses a `[[bin]]` definition because different - // filesystems normalize utf-8 in different ways. For example, HFS uses - // "gru\u{308}รŸen" and APFS uses "gr\u{fc}รŸen". Defining it in TOML forces - // one form to be used. 
- let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - edition = "2018" - - [[bin]] - name = 'grรผรŸen' - path = 'src/bin/grussen.rs' - "#, - ) - .file("src/bin/foo.rs", "fn main() {}") - .file("src/bin/with-dash.rs", "fn main() {}") - .file("src/bin/grussen.rs", "fn main() {}") - .build(); - - let bin_path = |name| p.bin(name).to_string_lossy().replace("\\", "\\\\"); - p.change_file( - "tests/check_env.rs", - &r#" - #[test] - fn run_bins() { - assert_eq!(env!("CARGO_BIN_EXE_foo"), ""); - assert_eq!(env!("CARGO_BIN_EXE_with-dash"), ""); - assert_eq!(env!("CARGO_BIN_EXE_grรผรŸen"), ""); - } - "# - .replace("", &bin_path("foo")) - .replace("", &bin_path("with-dash")) - .replace("", &bin_path("grรผรŸen")), - ); - - p.cargo("test --test check_env").run(); - p.cargo("check --test check_env").run(); -} - -#[cargo_test] -fn test_workspaces_cwd() { - // This tests that all the different test types are executed from the - // crate directory (manifest_dir), and not from the workspace root. - - let make_lib_file = |expected| { - format!( - r#" - //! ``` - //! assert_eq!("{expected}", std::fs::read_to_string("file.txt").unwrap()); - //! assert_eq!("{expected}", include_str!("../file.txt")); - //! assert_eq!( - //! std::path::PathBuf::from(std::env!("CARGO_MANIFEST_DIR")), - //! std::env::current_dir().unwrap(), - //! ); - //! ``` - - #[test] - fn test_unit_{expected}_cwd() {{ - assert_eq!("{expected}", std::fs::read_to_string("file.txt").unwrap()); - assert_eq!("{expected}", include_str!("../file.txt")); - assert_eq!( - std::path::PathBuf::from(std::env!("CARGO_MANIFEST_DIR")), - std::env::current_dir().unwrap(), - ); - }} - "#, - expected = expected - ) - }; - let make_test_file = |expected| { - format!( - r#" - #[test] - fn test_integration_{expected}_cwd() {{ - assert_eq!("{expected}", std::fs::read_to_string("file.txt").unwrap()); - assert_eq!("{expected}", include_str!("../file.txt")); - assert_eq!( - std::path::PathBuf::from(std::env!("CARGO_MANIFEST_DIR")), - std::env::current_dir().unwrap(), - ); - }} - "#, - expected = expected - ) - }; - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "root-crate" - version = "0.0.0" - - [workspace] - members = [".", "nested-crate", "very/deeply/nested/deep-crate"] - "#, - ) - .file("file.txt", "root") - .file("src/lib.rs", &make_lib_file("root")) - .file("tests/integration.rs", &make_test_file("root")) - .file( - "nested-crate/Cargo.toml", - r#" - [package] - name = "nested-crate" - version = "0.0.0" - "#, - ) - .file("nested-crate/file.txt", "nested") - .file("nested-crate/src/lib.rs", &make_lib_file("nested")) - .file( - "nested-crate/tests/integration.rs", - &make_test_file("nested"), - ) - .file( - "very/deeply/nested/deep-crate/Cargo.toml", - r#" - [package] - name = "deep-crate" - version = "0.0.0" - "#, - ) - .file("very/deeply/nested/deep-crate/file.txt", "deep") - .file( - "very/deeply/nested/deep-crate/src/lib.rs", - &make_lib_file("deep"), - ) - .file( - "very/deeply/nested/deep-crate/tests/integration.rs", - &make_test_file("deep"), - ) - .build(); - - p.cargo("test --workspace --all") - .with_stderr_contains("[DOCTEST] root-crate") - .with_stderr_contains("[DOCTEST] nested-crate") - .with_stderr_contains("[DOCTEST] deep-crate") - .with_stdout_contains("test test_unit_root_cwd ... ok") - .with_stdout_contains("test test_unit_nested_cwd ... ok") - .with_stdout_contains("test test_unit_deep_cwd ... ok") - .with_stdout_contains("test test_integration_root_cwd ... 
ok") - .with_stdout_contains("test test_integration_nested_cwd ... ok") - .with_stdout_contains("test test_integration_deep_cwd ... ok") - .run(); - - p.cargo("test -p root-crate --all") - .with_stderr_contains("[DOCTEST] root-crate") - .with_stdout_contains("test test_unit_root_cwd ... ok") - .with_stdout_contains("test test_integration_root_cwd ... ok") - .run(); - - p.cargo("test -p nested-crate --all") - .with_stderr_contains("[DOCTEST] nested-crate") - .with_stdout_contains("test test_unit_nested_cwd ... ok") - .with_stdout_contains("test test_integration_nested_cwd ... ok") - .run(); - - p.cargo("test -p deep-crate --all") - .with_stderr_contains("[DOCTEST] deep-crate") - .with_stdout_contains("test test_unit_deep_cwd ... ok") - .with_stdout_contains("test test_integration_deep_cwd ... ok") - .run(); - - p.cargo("test --all") - .cwd("nested-crate") - .with_stderr_contains("[DOCTEST] nested-crate") - .with_stdout_contains("test test_unit_nested_cwd ... ok") - .with_stdout_contains("test test_integration_nested_cwd ... ok") - .run(); - - p.cargo("test --all") - .cwd("very/deeply/nested/deep-crate") - .with_stderr_contains("[DOCTEST] deep-crate") - .with_stdout_contains("test test_unit_deep_cwd ... ok") - .with_stdout_contains("test test_integration_deep_cwd ... ok") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/timings.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/timings.rs deleted file mode 100644 index cd173dd74..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/timings.rs +++ /dev/null @@ -1,65 +0,0 @@ -//! Tests for -Ztimings. - -use cargo_test_support::project; -use cargo_test_support::registry::Package; - -#[cargo_test] -fn timings_works() { - Package::new("dep", "0.1.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = "0.1" - "#, - ) - .file("src/lib.rs", "") - .file("src/main.rs", "fn main() {}") - .file("tests/t1.rs", "") - .file("examples/ex1.rs", "fn main() {}") - .build(); - - p.cargo("build --all-targets -Ztimings") - .masquerade_as_nightly_cargo() - .with_stderr_unordered( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] dep v0.1.0 [..] -[COMPILING] dep v0.1.0 -[COMPILING] foo v0.1.0 [..] -[COMPLETED] dep v0.1.0 in [..]s -[COMPLETED] foo v0.1.0 in [..]s -[COMPLETED] foo v0.1.0 bin \"foo\" in [..]s -[COMPLETED] foo v0.1.0 example \"ex1\" in [..]s -[COMPLETED] foo v0.1.0 lib (test) in [..]s -[COMPLETED] foo v0.1.0 bin \"foo\" (test) in [..]s -[COMPLETED] foo v0.1.0 test \"t1\" (test) in [..]s -[FINISHED] [..] - Timing report saved to [..]/foo/cargo-timing-[..].html -", - ) - .run(); - - p.cargo("clean").run(); - - p.cargo("test -Ztimings") - .masquerade_as_nightly_cargo() - .run(); - - p.cargo("clean").run(); - - p.cargo("check -Ztimings") - .masquerade_as_nightly_cargo() - .run(); - - p.cargo("clean").run(); - - p.cargo("doc -Ztimings").masquerade_as_nightly_cargo().run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/tool_paths.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/tool_paths.rs deleted file mode 100644 index d8be6b6dc..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/tool_paths.rs +++ /dev/null @@ -1,402 +0,0 @@ -//! Tests for configuration values that point to programs. 
- -use cargo_test_support::{basic_lib_manifest, project, rustc_host, rustc_host_env}; - -#[cargo_test] -fn pathless_tools() { - let target = rustc_host(); - - let foo = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", "") - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - linker = "nonexistent-linker" - "#, - target - ), - ) - .build(); - - foo.cargo("build --verbose") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc [..] -C linker=nonexistent-linker [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn absolute_tools() { - let target = rustc_host(); - - // Escaped as they appear within a TOML config file - let linker = if cfg!(windows) { - r#"C:\\bogus\\nonexistent-linker"# - } else { - r#"/bogus/nonexistent-linker"# - }; - - let foo = project() - .file("Cargo.toml", &basic_lib_manifest("foo")) - .file("src/lib.rs", "") - .file( - ".cargo/config", - &format!( - r#" - [target.{target}] - linker = "{linker}" - "#, - target = target, - linker = linker - ), - ) - .build(); - - foo.cargo("build --verbose") - .with_stderr( - "\ -[COMPILING] foo v0.5.0 ([CWD]) -[RUNNING] `rustc [..] -C linker=[..]bogus/nonexistent-linker [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn relative_tools() { - let target = rustc_host(); - - // Escaped as they appear within a TOML config file - let linker = if cfg!(windows) { - r#".\\tools\\nonexistent-linker"# - } else { - r#"./tools/nonexistent-linker"# - }; - - // Funky directory structure to test that relative tool paths are made absolute - // by reference to the `.cargo/..` directory and not to (for example) the CWD. - let p = project() - .no_manifest() - .file("bar/Cargo.toml", &basic_lib_manifest("bar")) - .file("bar/src/lib.rs", "") - .file( - ".cargo/config", - &format!( - r#" - [target.{target}] - linker = "{linker}" - "#, - target = target, - linker = linker - ), - ) - .build(); - - let prefix = p.root().into_os_string().into_string().unwrap(); - - p.cargo("build --verbose") - .cwd("bar") - .with_stderr(&format!( - "\ -[COMPILING] bar v0.5.0 ([CWD]) -[RUNNING] `rustc [..] -C linker={prefix}/./tools/nonexistent-linker [..]` -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - prefix = prefix, - )) - .run(); -} - -#[cargo_test] -fn custom_runner() { - let target = rustc_host(); - - let p = project() - .file("src/main.rs", "fn main() {}") - .file("tests/test.rs", "") - .file("benches/bench.rs", "") - .file( - ".cargo/config", - &format!( - r#" - [target.{}] - runner = "nonexistent-runner -r" - "#, - target - ), - ) - .build(); - - p.cargo("run -- --param") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param` -", - ) - .run(); - - p.cargo("test --test test --verbose -- --param") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc [..]` -[FINISHED] test [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `nonexistent-runner -r [..]/target/debug/deps/test-[..][EXE] --param` -", - ) - .run(); - - p.cargo("bench --bench bench --verbose -- --param") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[RUNNING] `rustc [..]` -[RUNNING] `rustc [..]` -[FINISHED] bench [optimized] target(s) in [..] 
-[RUNNING] `nonexistent-runner -r [..]/target/release/deps/bench-[..][EXE] --param --bench` -", - ) - .run(); -} - -// can set a custom runner via `target.'cfg(..)'.runner` -#[cargo_test] -fn custom_runner_cfg() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [target.'cfg(not(target_os = "none"))'] - runner = "nonexistent-runner -r" - "#, - ) - .build(); - - p.cargo("run -- --param") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param` -", - ) - .run(); -} - -// custom runner set via `target.$triple.runner` have precedence over `target.'cfg(..)'.runner` -#[cargo_test] -fn custom_runner_cfg_precedence() { - let target = rustc_host(); - - let p = project() - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - &format!( - r#" - [target.'cfg(not(target_os = "none"))'] - runner = "ignored-runner" - - [target.{}] - runner = "nonexistent-runner -r" - "#, - target - ), - ) - .build(); - - p.cargo("run -- --param") - .with_status(101) - .with_stderr_contains( - "\ -[COMPILING] foo v0.0.1 ([CWD]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -[RUNNING] `nonexistent-runner -r target/debug/foo[EXE] --param` -", - ) - .run(); -} - -#[cargo_test] -fn custom_runner_cfg_collision() { - let p = project() - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config", - r#" - [target.'cfg(not(target_arch = "avr"))'] - runner = "true" - - [target.'cfg(not(target_os = "none"))'] - runner = "false" - "#, - ) - .build(); - - p.cargo("run -- --param") - .with_status(101) - .with_stderr( - "\ -[ERROR] several matching instances of `target.'cfg(..)'.runner` in `.cargo/config` -first match `cfg(not(target_arch = \"avr\"))` located in [..]/foo/.cargo/config -second match `cfg(not(target_os = \"none\"))` located in [..]/foo/.cargo/config -", - ) - .run(); -} - -#[cargo_test] -fn custom_runner_env() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env()); - - p.cargo("run") - .env(&key, "nonexistent-runner --foo") - .with_status(101) - // FIXME: Update "Caused by" error message once rust/pull/87704 is merged. - // On Windows, changing to a custom executable resolver has changed the - // error messages. - .with_stderr(&format!( - "\ -[COMPILING] foo [..] -[FINISHED] dev [..] -[RUNNING] `nonexistent-runner --foo target/debug/foo[EXE]` -[ERROR] could not execute process `nonexistent-runner --foo target/debug/foo[EXE]` (never executed) - -Caused by: - [..] -" - )) - .run(); -} - -#[cargo_test] -fn custom_runner_env_overrides_config() { - let target = rustc_host(); - let p = project() - .file("src/main.rs", "fn main() {}") - .file( - ".cargo/config.toml", - &format!( - r#" - [target.{}] - runner = "should-not-run -r" - "#, - target - ), - ) - .build(); - - let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env()); - - p.cargo("run") - .env(&key, "should-run --foo") - .with_status(101) - .with_stderr_contains("[RUNNING] `should-run --foo target/debug/foo[EXE]`") - .run(); -} - -#[cargo_test] -#[cfg(unix)] // Assumes `true` is in PATH. -fn custom_runner_env_true() { - // Check for a bug where "true" was interpreted as a boolean instead of - // the executable. 
- let p = project().file("src/main.rs", "fn main() {}").build(); - - let key = format!("CARGO_TARGET_{}_RUNNER", rustc_host_env()); - - p.cargo("run") - .env(&key, "true") - .with_stderr_contains("[RUNNING] `true target/debug/foo[EXE]`") - .run(); -} - -#[cargo_test] -fn custom_linker_env() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - let key = format!("CARGO_TARGET_{}_LINKER", rustc_host_env()); - - p.cargo("build -v") - .env(&key, "nonexistent-linker") - .with_status(101) - .with_stderr_contains("[RUNNING] `rustc [..]-C linker=nonexistent-linker [..]") - .run(); -} - -#[cargo_test] -fn target_in_environment_contains_lower_case() { - let p = project().file("src/main.rs", "fn main() {}").build(); - - let target = rustc_host(); - let env_key = format!( - "CARGO_TARGET_{}_LINKER", - target.to_lowercase().replace('-', "_") - ); - - p.cargo("build -v --target") - .arg(target) - .env(&env_key, "nonexistent-linker") - .with_stderr_contains(format!( - "warning: Environment variables are expected to use uppercase \ - letters and underscores, the variable `{}` will be ignored and \ - have no effect", - env_key - )) - .run(); -} - -#[cargo_test] -fn cfg_ignored_fields() { - // Test for some ignored fields in [target.'cfg()'] tables. - let p = project() - .file( - ".cargo/config", - r#" - # Try some empty tables. - [target.'cfg(not(foo))'] - [target.'cfg(not(bar))'.somelib] - - # A bunch of unused fields. - [target.'cfg(not(target_os = "none"))'] - linker = 'false' - ar = 'false' - foo = {rustc-flags = "-l foo"} - invalid = 1 - runner = 'false' - rustflags = '' - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .with_stderr( - "\ -[WARNING] unused key `somelib` in [target] config table `cfg(not(bar))` -[WARNING] unused key `ar` in [target] config table `cfg(not(target_os = \"none\"))` -[WARNING] unused key `foo` in [target] config table `cfg(not(target_os = \"none\"))` -[WARNING] unused key `invalid` in [target] config table `cfg(not(target_os = \"none\"))` -[WARNING] unused key `linker` in [target] config table `cfg(not(target_os = \"none\"))` -[CHECKING] foo v0.0.1 ([..]) -[FINISHED] [..] -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/tree.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/tree.rs deleted file mode 100644 index 63115a859..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/tree.rs +++ /dev/null @@ -1,2049 +0,0 @@ -//! Tests for the `cargo tree` command. - -use super::features2::switch_to_resolver_2; -use cargo_test_support::cross_compile::{self, alternate}; -use cargo_test_support::registry::{Dependency, Package}; -use cargo_test_support::{basic_manifest, git, project, rustc_host, Project}; - -fn make_simple_proj() -> Project { - Package::new("c", "1.0.0").publish(); - Package::new("b", "1.0.0").dep("c", "1.0").publish(); - Package::new("a", "1.0.0").dep("b", "1.0").publish(); - Package::new("bdep", "1.0.0").dep("b", "1.0").publish(); - Package::new("devdep", "1.0.0").dep("b", "1.0.0").publish(); - - project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - a = "1.0" - c = "1.0" - - [build-dependencies] - bdep = "1.0" - - [dev-dependencies] - devdep = "1.0" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build() -} - -#[cargo_test] -fn simple() { - // A simple test with a few different dependencies. 
- let p = make_simple_proj();
-
- p.cargo("tree")
- .with_stdout(
- "\
-foo v0.1.0 ([..]/foo)
-├── a v1.0.0
-│ └── b v1.0.0
-│ └── c v1.0.0
-└── c v1.0.0
-[build-dependencies]
-└── bdep v1.0.0
- └── b v1.0.0 (*)
-[dev-dependencies]
-└── devdep v1.0.0
- └── b v1.0.0 (*)
-",
- )
- .run();
-
- p.cargo("tree -p bdep")
- .with_stdout(
- "\
-bdep v1.0.0
-└── b v1.0.0
- └── c v1.0.0
-",
- )
- .run();
-}
-
-#[cargo_test]
-fn virtual_workspace() {
- // Multiple packages in a virtual workspace.
- Package::new("somedep", "1.0.0").publish();
- let p = project()
- .file(
- "Cargo.toml",
- r#"
- [workspace]
- members = ["a", "baz", "c"]
- "#,
- )
- .file("a/Cargo.toml", &basic_manifest("a", "1.0.0"))
- .file("a/src/lib.rs", "")
- .file(
- "baz/Cargo.toml",
- r#"
- [package]
- name = "baz"
- version = "0.1.0"
-
- [dependencies]
- c = { path = "../c" }
- somedep = "1.0"
- "#,
- )
- .file("baz/src/lib.rs", "")
- .file("c/Cargo.toml", &basic_manifest("c", "1.0.0"))
- .file("c/src/lib.rs", "")
- .build();
-
- p.cargo("tree")
- .with_stdout(
- "\
-a v1.0.0 ([..]/foo/a)
-
-baz v0.1.0 ([..]/foo/baz)
-├── c v1.0.0 ([..]/foo/c)
-└── somedep v1.0.0
-
-c v1.0.0 ([..]/foo/c)
-",
- )
- .run();
-
- p.cargo("tree -p a").with_stdout("a v1.0.0 [..]").run();
-
- p.cargo("tree")
- .cwd("baz")
- .with_stdout(
- "\
-baz v0.1.0 ([..]/foo/baz)
-├── c v1.0.0 ([..]/foo/c)
-└── somedep v1.0.0
-",
- )
- .run();
-
- // exclude baz
- p.cargo("tree --workspace --exclude baz")
- .with_stdout(
- "\
-a v1.0.0 ([..]/foo/a)
-
-c v1.0.0 ([..]/foo/c)
-",
- )
- .run();
-
- // exclude glob '*z'
- p.cargo("tree --workspace --exclude '*z'")
- .with_stdout(
- "\
-a v1.0.0 ([..]/foo/a)
-
-c v1.0.0 ([..]/foo/c)
-",
- )
- .run();
-
- // include glob '*z'
- p.cargo("tree -p '*z'")
- .with_stdout(
- "\
-baz v0.1.0 ([..]/foo/baz)
-├── c v1.0.0 ([..]/foo/c)
-└── somedep v1.0.0
-",
- )
- .run();
-}
-
-#[cargo_test]
-fn dedupe_edges() {
- // Works around https://github.com/rust-lang/cargo/issues/7985
- Package::new("bitflags", "1.0.0").publish();
- Package::new("manyfeat", "1.0.0")
- .feature("f1", &[])
- .feature("f2", &[])
- .feature("f3", &[])
- .dep("bitflags", "1.0")
- .publish();
- Package::new("a", "1.0.0")
- .feature_dep("manyfeat", "1.0", &["f1"])
- .publish();
- Package::new("b", "1.0.0")
- .feature_dep("manyfeat", "1.0", &["f2"])
- .publish();
- Package::new("c", "1.0.0")
- .feature_dep("manyfeat", "1.0", &["f3"])
- .publish();
-
- let p = project()
- .file(
- "Cargo.toml",
- r#"
- [package]
- name = "foo"
- version = "0.1.0"
-
- [dependencies]
- a = "1.0"
- b = "1.0"
- c = "1.0"
- "#,
- )
- .file("src/lib.rs", "")
- .build();
-
- p.cargo("tree")
- .with_stdout(
- "\
-foo v0.1.0 ([..]/foo)
-├── a v1.0.0
-│ └── manyfeat v1.0.0
-│ └── bitflags v1.0.0
-├── b v1.0.0
-│ └── manyfeat v1.0.0 (*)
-└── c v1.0.0
- └── manyfeat v1.0.0 (*)
-",
- )
- .run();
-}
-
-#[cargo_test]
-fn renamed_deps() {
- // Handles renamed dependencies. 
- Package::new("one", "1.0.0").publish(); - Package::new("two", "1.0.0").publish(); - Package::new("bar", "1.0.0").dep("one", "1.0").publish(); - Package::new("bar", "2.0.0").dep("two", "1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [dependencies] - bar1 = {version = "1.0", package="bar"} - bar2 = {version = "2.0", package="bar"} - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree") - .with_stdout( - "\ -foo v1.0.0 ([..]/foo) -โ”œโ”€โ”€ bar v1.0.0 -โ”‚ โ””โ”€โ”€ one v1.0.0 -โ””โ”€โ”€ bar v2.0.0 - โ””โ”€โ”€ two v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -fn source_kinds() { - // Handles git and path sources. - Package::new("regdep", "1.0.0").publish(); - let git_project = git::new("gitdep", |p| { - p.file("Cargo.toml", &basic_manifest("gitdep", "1.0.0")) - .file("src/lib.rs", "") - }); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - regdep = "1.0" - pathdep = {{ path = "pathdep" }} - gitdep = {{ git = "{}" }} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "") - .file("pathdep/Cargo.toml", &basic_manifest("pathdep", "1.0.0")) - .file("pathdep/src/lib.rs", "") - .build(); - - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ gitdep v1.0.0 (file://[..]/gitdep#[..]) -โ”œโ”€โ”€ pathdep v1.0.0 ([..]/foo/pathdep) -โ””โ”€โ”€ regdep v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -fn features() { - // Exercises a variety of feature behaviors. - Package::new("optdep_default", "1.0.0").publish(); - Package::new("optdep", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [dependencies] - optdep_default = { version = "1.0", optional = true } - optdep = { version = "1.0", optional = true } - - [features] - default = ["optdep_default"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree") - .with_stdout( - "\ -a v0.1.0 ([..]/foo) -โ””โ”€โ”€ optdep_default v1.0.0 -", - ) - .run(); - - p.cargo("tree --no-default-features") - .with_stdout( - "\ -a v0.1.0 ([..]/foo) -", - ) - .run(); - - p.cargo("tree --all-features") - .with_stdout( - "\ -a v0.1.0 ([..]/foo) -โ”œโ”€โ”€ optdep v1.0.0 -โ””โ”€โ”€ optdep_default v1.0.0 -", - ) - .run(); - - p.cargo("tree --features optdep") - .with_stdout( - "\ -a v0.1.0 ([..]/foo) -โ”œโ”€โ”€ optdep v1.0.0 -โ””โ”€โ”€ optdep_default v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -fn filters_target() { - // --target flag - if cross_compile::disabled() { - return; - } - Package::new("targetdep", "1.0.0").publish(); - Package::new("hostdep", "1.0.0").publish(); - Package::new("devdep", "1.0.0").publish(); - Package::new("build_target_dep", "1.0.0").publish(); - Package::new("build_host_dep", "1.0.0") - .target_dep("targetdep", "1.0", alternate()) - .target_dep("hostdep", "1.0", rustc_host()) - .publish(); - Package::new("pm_target", "1.0.0") - .proc_macro(true) - .publish(); - Package::new("pm_host", "1.0.0").proc_macro(true).publish(); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [target.'{alt}'.dependencies] - targetdep = "1.0" - pm_target = "1.0" - - [target.'{host}'.dependencies] - hostdep = "1.0" - pm_host = "1.0" - - [target.'{alt}'.dev-dependencies] - devdep = "1.0" - - [target.'{alt}'.build-dependencies] - build_target_dep = "1.0" - - [target.'{host}'.build-dependencies] - build_host_dep = "1.0" - "#, - alt = alternate(), - host 
= rustc_host() - ), - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ hostdep v1.0.0 -โ””โ”€โ”€ pm_host v1.0.0 (proc-macro) -[build-dependencies] -โ””โ”€โ”€ build_host_dep v1.0.0 - โ””โ”€โ”€ hostdep v1.0.0 -", - ) - .run(); - - p.cargo("tree --target") - .arg(alternate()) - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ pm_target v1.0.0 (proc-macro) -โ””โ”€โ”€ targetdep v1.0.0 -[build-dependencies] -โ””โ”€โ”€ build_host_dep v1.0.0 - โ””โ”€โ”€ hostdep v1.0.0 -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 -", - ) - .run(); - - p.cargo("tree --target") - .arg(rustc_host()) - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ hostdep v1.0.0 -โ””โ”€โ”€ pm_host v1.0.0 (proc-macro) -[build-dependencies] -โ””โ”€โ”€ build_host_dep v1.0.0 - โ””โ”€โ”€ hostdep v1.0.0 -", - ) - .run(); - - p.cargo("tree --target=all") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ hostdep v1.0.0 -โ”œโ”€โ”€ pm_host v1.0.0 (proc-macro) -โ”œโ”€โ”€ pm_target v1.0.0 (proc-macro) -โ””โ”€โ”€ targetdep v1.0.0 -[build-dependencies] -โ”œโ”€โ”€ build_host_dep v1.0.0 -โ”‚ โ”œโ”€โ”€ hostdep v1.0.0 -โ”‚ โ””โ”€โ”€ targetdep v1.0.0 -โ””โ”€โ”€ build_target_dep v1.0.0 -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 -", - ) - .run(); - - // no-proc-macro - p.cargo("tree --target=all -e no-proc-macro") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ hostdep v1.0.0 -โ””โ”€โ”€ targetdep v1.0.0 -[build-dependencies] -โ”œโ”€โ”€ build_host_dep v1.0.0 -โ”‚ โ”œโ”€โ”€ hostdep v1.0.0 -โ”‚ โ””โ”€โ”€ targetdep v1.0.0 -โ””โ”€โ”€ build_target_dep v1.0.0 -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -fn dep_kinds() { - Package::new("inner-devdep", "1.0.0").publish(); - Package::new("inner-builddep", "1.0.0").publish(); - Package::new("inner-normal", "1.0.0").publish(); - Package::new("inner-pm", "1.0.0").proc_macro(true).publish(); - Package::new("inner-buildpm", "1.0.0") - .proc_macro(true) - .publish(); - Package::new("normaldep", "1.0.0") - .dep("inner-normal", "1.0") - .dev_dep("inner-devdep", "1.0") - .build_dep("inner-builddep", "1.0") - .publish(); - Package::new("devdep", "1.0.0") - .dep("inner-normal", "1.0") - .dep("inner-pm", "1.0") - .dev_dep("inner-devdep", "1.0") - .build_dep("inner-builddep", "1.0") - .build_dep("inner-buildpm", "1.0") - .publish(); - Package::new("builddep", "1.0.0") - .dep("inner-normal", "1.0") - .dev_dep("inner-devdep", "1.0") - .build_dep("inner-builddep", "1.0") - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - normaldep = "1.0" - - [dev-dependencies] - devdep = "1.0" - - [build-dependencies] - builddep = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ normaldep v1.0.0 - โ””โ”€โ”€ inner-normal v1.0.0 - [build-dependencies] - โ””โ”€โ”€ inner-builddep v1.0.0 -[build-dependencies] -โ””โ”€โ”€ builddep v1.0.0 - โ””โ”€โ”€ inner-normal v1.0.0 - [build-dependencies] - โ””โ”€โ”€ inner-builddep v1.0.0 -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 - โ”œโ”€โ”€ inner-normal v1.0.0 - โ””โ”€โ”€ inner-pm v1.0.0 (proc-macro) - [build-dependencies] - โ”œโ”€โ”€ inner-builddep v1.0.0 - โ””โ”€โ”€ inner-buildpm v1.0.0 (proc-macro) -", - ) - .run(); - - p.cargo("tree -e no-dev") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ normaldep v1.0.0 - โ””โ”€โ”€ inner-normal v1.0.0 - [build-dependencies] - โ””โ”€โ”€ 
inner-builddep v1.0.0 -[build-dependencies] -โ””โ”€โ”€ builddep v1.0.0 - โ””โ”€โ”€ inner-normal v1.0.0 - [build-dependencies] - โ””โ”€โ”€ inner-builddep v1.0.0 -", - ) - .run(); - - p.cargo("tree -e normal") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ normaldep v1.0.0 - โ””โ”€โ”€ inner-normal v1.0.0 -", - ) - .run(); - - p.cargo("tree -e dev,build") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -[build-dependencies] -โ””โ”€โ”€ builddep v1.0.0 - [build-dependencies] - โ””โ”€โ”€ inner-builddep v1.0.0 -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 - [build-dependencies] - โ”œโ”€โ”€ inner-builddep v1.0.0 - โ””โ”€โ”€ inner-buildpm v1.0.0 (proc-macro) -", - ) - .run(); - - p.cargo("tree -e dev,build,no-proc-macro") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -[build-dependencies] -โ””โ”€โ”€ builddep v1.0.0 - [build-dependencies] - โ””โ”€โ”€ inner-builddep v1.0.0 -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 - [build-dependencies] - โ””โ”€โ”€ inner-builddep v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -fn cyclic_dev_dep() { - // Cyclical dev-dependency and inverse flag. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dev-dependencies] - dev-dep = { path = "dev-dep" } - "#, - ) - .file("src/lib.rs", "") - .file( - "dev-dep/Cargo.toml", - r#" - [package] - name = "dev-dep" - version = "0.1.0" - - [dependencies] - foo = { path=".." } - "#, - ) - .file("dev-dep/src/lib.rs", "") - .build(); - - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -[dev-dependencies] -โ””โ”€โ”€ dev-dep v0.1.0 ([..]/foo/dev-dep) - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) (*) -", - ) - .run(); - - p.cargo("tree --invert foo") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ dev-dep v0.1.0 ([..]/foo/dev-dep) - [dev-dependencies] - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) (*) -", - ) - .run(); -} - -#[cargo_test] -fn invert() { - Package::new("b1", "1.0.0").dep("c", "1.0").publish(); - Package::new("b2", "1.0.0").dep("d", "1.0").publish(); - Package::new("c", "1.0.0").publish(); - Package::new("d", "1.0.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - b1 = "1.0" - b2 = "1.0" - c = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ b1 v1.0.0 -โ”‚ โ””โ”€โ”€ c v1.0.0 -โ”œโ”€โ”€ b2 v1.0.0 -โ”‚ โ””โ”€โ”€ d v1.0.0 -โ””โ”€โ”€ c v1.0.0 -", - ) - .run(); - - p.cargo("tree --invert c") - .with_stdout( - "\ -c v1.0.0 -โ”œโ”€โ”€ b1 v1.0.0 -โ”‚ โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -", - ) - .run(); -} - -#[cargo_test] -fn invert_with_build_dep() { - // -i for a common dependency between normal and build deps. 
- Package::new("common", "1.0.0").publish(); - Package::new("bdep", "1.0.0").dep("common", "1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - common = "1.0" - - [build-dependencies] - bdep = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ common v1.0.0 -[build-dependencies] -โ””โ”€โ”€ bdep v1.0.0 - โ””โ”€โ”€ common v1.0.0 -", - ) - .run(); - - p.cargo("tree -i common") - .with_stdout( - "\ -common v1.0.0 -โ”œโ”€โ”€ bdep v1.0.0 -โ”‚ [build-dependencies] -โ”‚ โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -", - ) - .run(); -} - -#[cargo_test] -fn no_indent() { - let p = make_simple_proj(); - - p.cargo("tree --prefix=none") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -a v1.0.0 -b v1.0.0 -c v1.0.0 -c v1.0.0 -bdep v1.0.0 -b v1.0.0 (*) -devdep v1.0.0 -b v1.0.0 (*) -", - ) - .run(); -} - -#[cargo_test] -fn prefix_depth() { - let p = make_simple_proj(); - - p.cargo("tree --prefix=depth") - .with_stdout( - "\ -0foo v0.1.0 ([..]/foo) -1a v1.0.0 -2b v1.0.0 -3c v1.0.0 -1c v1.0.0 -1bdep v1.0.0 -2b v1.0.0 (*) -1devdep v1.0.0 -2b v1.0.0 (*) -", - ) - .run(); -} - -#[cargo_test] -fn no_dedupe() { - let p = make_simple_proj(); - - p.cargo("tree --no-dedupe") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ a v1.0.0 -โ”‚ โ””โ”€โ”€ b v1.0.0 -โ”‚ โ””โ”€โ”€ c v1.0.0 -โ””โ”€โ”€ c v1.0.0 -[build-dependencies] -โ””โ”€โ”€ bdep v1.0.0 - โ””โ”€โ”€ b v1.0.0 - โ””โ”€โ”€ c v1.0.0 -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 - โ””โ”€โ”€ b v1.0.0 - โ””โ”€โ”€ c v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -fn no_dedupe_cycle() { - // --no-dedupe with a dependency cycle - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dev-dependencies] - bar = {path = "bar"} - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [dependencies] - foo = {path=".."} - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -[dev-dependencies] -โ””โ”€โ”€ bar v0.1.0 ([..]/foo/bar) - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) (*) -", - ) - .run(); - - p.cargo("tree --no-dedupe") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -[dev-dependencies] -โ””โ”€โ”€ bar v0.1.0 ([..]/foo/bar) - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) (*) -", - ) - .run(); -} - -#[cargo_test] -fn duplicates() { - Package::new("dog", "1.0.0").publish(); - Package::new("dog", "2.0.0").publish(); - Package::new("cat", "1.0.0").publish(); - Package::new("cat", "2.0.0").publish(); - Package::new("dep", "1.0.0") - .dep("dog", "1.0") - .dep("cat", "1.0") - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [dependencies] - dog1 = { version = "1.0", package = "dog" } - dog2 = { version = "2.0", package = "dog" } - "#, - ) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - - [dependencies] - dep = "1.0" - cat = "2.0" - "#, - ) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("tree -p a") - .with_stdout( - "\ -a v0.1.0 ([..]/foo/a) -โ”œโ”€โ”€ dog v1.0.0 -โ””โ”€โ”€ dog v2.0.0 -", - ) - .run(); - - p.cargo("tree -p b") - .with_stdout( - "\ -b v0.1.0 ([..]/foo/b) -โ”œโ”€โ”€ cat v2.0.0 -โ””โ”€โ”€ dep v1.0.0 - โ”œโ”€โ”€ cat v1.0.0 - โ””โ”€โ”€ dog 
v1.0.0 -", - ) - .run(); - - p.cargo("tree -p a -d") - .with_stdout( - "\ -dog v1.0.0 -โ””โ”€โ”€ a v0.1.0 ([..]/foo/a) - -dog v2.0.0 -โ””โ”€โ”€ a v0.1.0 ([..]/foo/a) -", - ) - .run(); - - p.cargo("tree -p b -d") - .with_stdout( - "\ -cat v1.0.0 -โ””โ”€โ”€ dep v1.0.0 - โ””โ”€โ”€ b v0.1.0 ([..]/foo/b) - -cat v2.0.0 -โ””โ”€โ”€ b v0.1.0 ([..]/foo/b) -", - ) - .run(); -} - -#[cargo_test] -fn charset() { - let p = make_simple_proj(); - p.cargo("tree --charset ascii") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -|-- a v1.0.0 -| `-- b v1.0.0 -| `-- c v1.0.0 -`-- c v1.0.0 -[build-dependencies] -`-- bdep v1.0.0 - `-- b v1.0.0 (*) -[dev-dependencies] -`-- devdep v1.0.0 - `-- b v1.0.0 (*) -", - ) - .run(); -} - -#[cargo_test] -fn format() { - Package::new("dep", "1.0.0").publish(); - Package::new("other-dep", "1.0.0").publish(); - - Package::new("dep_that_is_awesome", "1.0.0") - .file( - "Cargo.toml", - r#" - [package] - name = "dep_that_is_awesome" - version = "1.0.0" - - [lib] - name = "awesome_dep" - "#, - ) - .file("src/lib.rs", "pub struct Straw;") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - license = "MIT" - repository = "https://github.com/rust-lang/cargo" - - [dependencies] - dep = {version="1.0", optional=true} - other-dep = {version="1.0", optional=true} - dep_that_is_awesome = {version="1.0", optional=true} - - - [features] - default = ["foo"] - foo = ["bar"] - bar = [] - "#, - ) - .file("src/main.rs", "") - .build(); - - p.cargo("tree --format <<<{p}>>>") - .with_stdout("<<>>") - .run(); - - p.cargo("tree --format {}") - .with_stderr( - "\ -[ERROR] tree format `{}` not valid - -Caused by: - unsupported pattern `` -", - ) - .with_status(101) - .run(); - - p.cargo("tree --format {p}-{{hello}}") - .with_stdout("foo v0.1.0 ([..]/foo)-{hello}") - .run(); - - p.cargo("tree --format") - .arg("{p} {l} {r}") - .with_stdout("foo v0.1.0 ([..]/foo) MIT https://github.com/rust-lang/cargo") - .run(); - - p.cargo("tree --format") - .arg("{p} {f}") - .with_stdout("foo v0.1.0 ([..]/foo) bar,default,foo") - .run(); - - p.cargo("tree --all-features --format") - .arg("{p} [{f}]") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) [bar,default,dep,dep_that_is_awesome,foo,other-dep] -โ”œโ”€โ”€ dep v1.0.0 [] -โ”œโ”€โ”€ dep_that_is_awesome v1.0.0 [] -โ””โ”€โ”€ other-dep v1.0.0 [] -", - ) - .run(); - - p.cargo("tree") - .arg("--features=other-dep,dep_that_is_awesome") - .arg("--format={lib}") - .with_stdout( - " -โ”œโ”€โ”€ awesome_dep -โ””โ”€โ”€ other_dep -", - ) - .run(); -} - -#[cargo_test] -fn dev_dep_feature() { - // New feature resolver with optional dep - Package::new("optdep", "1.0.0").publish(); - Package::new("bar", "1.0.0") - .add_dep(Dependency::new("optdep", "1.0").optional(true)) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dev-dependencies] - bar = { version = "1.0", features = ["optdep"] } - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Old behavior. - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ bar v1.0.0 - โ””โ”€โ”€ optdep v1.0.0 -[dev-dependencies] -โ””โ”€โ”€ bar v1.0.0 (*) -", - ) - .run(); - - p.cargo("tree -e normal") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ bar v1.0.0 - โ””โ”€โ”€ optdep v1.0.0 -", - ) - .run(); - - // New behavior. 
- switch_to_resolver_2(&p); - - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ bar v1.0.0 - โ””โ”€โ”€ optdep v1.0.0 -[dev-dependencies] -โ””โ”€โ”€ bar v1.0.0 (*) -", - ) - .run(); - - p.cargo("tree -e normal") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ bar v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -fn host_dep_feature() { - // New feature resolver with optional build dep - Package::new("optdep", "1.0.0").publish(); - Package::new("bar", "1.0.0") - .add_dep(Dependency::new("optdep", "1.0").optional(true)) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [build-dependencies] - bar = { version = "1.0", features = ["optdep"] } - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .file("build.rs", "fn main() {}") - .build(); - - // Old behavior - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ bar v1.0.0 - โ””โ”€โ”€ optdep v1.0.0 -[build-dependencies] -โ””โ”€โ”€ bar v1.0.0 (*) -", - ) - .run(); - - // -p - p.cargo("tree -p bar") - .with_stdout( - "\ -bar v1.0.0 -โ””โ”€โ”€ optdep v1.0.0 -", - ) - .run(); - - // invert - p.cargo("tree -i optdep") - .with_stdout( - "\ -optdep v1.0.0 -โ””โ”€โ”€ bar v1.0.0 - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) - [build-dependencies] - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -", - ) - .run(); - - // New behavior. - switch_to_resolver_2(&p); - - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ bar v1.0.0 -[build-dependencies] -โ””โ”€โ”€ bar v1.0.0 - โ””โ”€โ”€ optdep v1.0.0 -", - ) - .run(); - - p.cargo("tree -p bar") - .with_stdout( - "\ -bar v1.0.0 - -bar v1.0.0 -โ””โ”€โ”€ optdep v1.0.0 -", - ) - .run(); - - p.cargo("tree -i optdep") - .with_stdout( - "\ -optdep v1.0.0 -โ””โ”€โ”€ bar v1.0.0 - [build-dependencies] - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -", - ) - .run(); - - // Check that -d handles duplicates with features. 
- p.cargo("tree -d") - .with_stdout( - "\ -bar v1.0.0 -โ””โ”€โ”€ foo v0.1.0 ([..]/foo) - -bar v1.0.0 -[build-dependencies] -โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -", - ) - .run(); -} - -#[cargo_test] -fn proc_macro_features() { - // New feature resolver with a proc-macro - Package::new("optdep", "1.0.0").publish(); - Package::new("somedep", "1.0.0") - .add_dep(Dependency::new("optdep", "1.0").optional(true)) - .publish(); - Package::new("pm", "1.0.0") - .proc_macro(true) - .feature_dep("somedep", "1.0", &["optdep"]) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - pm = "1.0" - somedep = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Old behavior - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ pm v1.0.0 (proc-macro) -โ”‚ โ””โ”€โ”€ somedep v1.0.0 -โ”‚ โ””โ”€โ”€ optdep v1.0.0 -โ””โ”€โ”€ somedep v1.0.0 (*) -", - ) - .run(); - - // Old behavior + no-proc-macro - p.cargo("tree -e no-proc-macro") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ somedep v1.0.0 - โ””โ”€โ”€ optdep v1.0.0 -", - ) - .run(); - - // -p - p.cargo("tree -p somedep") - .with_stdout( - "\ -somedep v1.0.0 -โ””โ”€โ”€ optdep v1.0.0 -", - ) - .run(); - - // -p -e no-proc-macro - p.cargo("tree -p somedep -e no-proc-macro") - .with_stdout( - "\ -somedep v1.0.0 -โ””โ”€โ”€ optdep v1.0.0 -", - ) - .run(); - - // invert - p.cargo("tree -i somedep") - .with_stdout( - "\ -somedep v1.0.0 -โ”œโ”€โ”€ foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ pm v1.0.0 (proc-macro) - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -", - ) - .run(); - - // invert + no-proc-macro - p.cargo("tree -i somedep -e no-proc-macro") - .with_stdout( - "\ -somedep v1.0.0 -โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -", - ) - .run(); - - // New behavior. - switch_to_resolver_2(&p); - - // Note the missing (*) - p.cargo("tree") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ pm v1.0.0 (proc-macro) -โ”‚ โ””โ”€โ”€ somedep v1.0.0 -โ”‚ โ””โ”€โ”€ optdep v1.0.0 -โ””โ”€โ”€ somedep v1.0.0 -", - ) - .run(); - - p.cargo("tree -e no-proc-macro") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ somedep v1.0.0 -", - ) - .run(); - - p.cargo("tree -p somedep") - .with_stdout( - "\ -somedep v1.0.0 - -somedep v1.0.0 -โ””โ”€โ”€ optdep v1.0.0 -", - ) - .run(); - - p.cargo("tree -i somedep") - .with_stdout( - "\ -somedep v1.0.0 -โ””โ”€โ”€ foo v0.1.0 ([..]/foo) - -somedep v1.0.0 -โ””โ”€โ”€ pm v1.0.0 (proc-macro) - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -", - ) - .run(); - - p.cargo("tree -i somedep -e no-proc-macro") - .with_stdout( - "\ -somedep v1.0.0 -โ””โ”€โ”€ foo v0.1.0 ([..]/foo) - -somedep v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -fn itarget_opt_dep() { - // New feature resolver with optional target dep - Package::new("optdep", "1.0.0").publish(); - Package::new("common", "1.0.0") - .add_dep(Dependency::new("optdep", "1.0").optional(true)) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [dependencies] - common = "1.0" - - [target.'cfg(whatever)'.dependencies] - common = { version = "1.0", features = ["optdep"] } - - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Old behavior - p.cargo("tree") - .with_stdout( - "\ -foo v1.0.0 ([..]/foo) -โ””โ”€โ”€ common v1.0.0 - โ””โ”€โ”€ optdep v1.0.0 -", - ) - .run(); - - // New behavior. 
- switch_to_resolver_2(&p); - - p.cargo("tree") - .with_stdout( - "\ -foo v1.0.0 ([..]/foo) -โ””โ”€โ”€ common v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -fn ambiguous_name() { - // -p that is ambiguous. - Package::new("dep", "1.0.0").publish(); - Package::new("dep", "2.0.0").publish(); - Package::new("bar", "1.0.0").dep("dep", "2.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = "1.0" - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree -p dep") - .with_stderr_contains( - "\ -error: There are multiple `dep` packages in your project, and the specification `dep` is ambiguous. -Please re-run this command with `-p ` where `` is one of the following: - dep:1.0.0 - dep:2.0.0 -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn workspace_features_are_local() { - // The features for workspace packages should be the same as `cargo build` - // (i.e., the features selected depend on the "current" package). - Package::new("optdep", "1.0.0").publish(); - Package::new("somedep", "1.0.0") - .add_dep(Dependency::new("optdep", "1.0").optional(true)) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [dependencies] - somedep = {version="1.0", features=["optdep"]} - "#, - ) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - - [dependencies] - somedep = "1.0" - "#, - ) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("tree") - .with_stdout( - "\ -a v0.1.0 ([..]/foo/a) -โ””โ”€โ”€ somedep v1.0.0 - โ””โ”€โ”€ optdep v1.0.0 - -b v0.1.0 ([..]/foo/b) -โ””โ”€โ”€ somedep v1.0.0 (*) -", - ) - .run(); - - p.cargo("tree -p a") - .with_stdout( - "\ -a v0.1.0 ([..]/foo/a) -โ””โ”€โ”€ somedep v1.0.0 - โ””โ”€โ”€ optdep v1.0.0 -", - ) - .run(); - - p.cargo("tree -p b") - .with_stdout( - "\ -b v0.1.0 ([..]/foo/b) -โ””โ”€โ”€ somedep v1.0.0 -", - ) - .run(); -} - -#[cargo_test] -fn unknown_edge_kind() { - let p = project() - .file("Cargo.toml", "") - .file("src/lib.rs", "") - .build(); - - p.cargo("tree -e unknown") - .with_stderr( - "\ -[ERROR] unknown edge kind `unknown`, valid values are \ -\"normal\", \"build\", \"dev\", \ -\"no-normal\", \"no-build\", \"no-dev\", \"no-proc-macro\", \ -\"features\", or \"all\" -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn mixed_no_edge_kinds() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree -e no-build,normal") - .with_stderr( - "\ -[ERROR] `normal` dependency kind cannot be mixed with \ -\"no-normal\", \"no-build\", or \"no-dev\" dependency kinds -", - ) - .with_status(101) - .run(); - - // `no-proc-macro` can be mixed with others - p.cargo("tree -e no-proc-macro,normal") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -", - ) - .run(); -} - -#[cargo_test] -fn depth_limit() { - let p = make_simple_proj(); - - p.cargo("tree --depth 0") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -[build-dependencies] -[dev-dependencies] -", - ) - .run(); - - p.cargo("tree --depth 1") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ a v1.0.0 -โ””โ”€โ”€ c v1.0.0 -[build-dependencies] -โ””โ”€โ”€ bdep v1.0.0 -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 -", - ) - .run(); - - p.cargo("tree --depth 2") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ a 
v1.0.0 -โ”‚ โ””โ”€โ”€ b v1.0.0 -โ””โ”€โ”€ c v1.0.0 -[build-dependencies] -โ””โ”€โ”€ bdep v1.0.0 - โ””โ”€โ”€ b v1.0.0 (*) -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 - โ””โ”€โ”€ b v1.0.0 (*) -", - ) - .run(); - - // specify a package - p.cargo("tree -p bdep --depth 1") - .with_stdout( - "\ -bdep v1.0.0 -โ””โ”€โ”€ b v1.0.0 -", - ) - .run(); - - // different prefix - p.cargo("tree --depth 1 --prefix depth") - .with_stdout( - "\ -0foo v0.1.0 ([..]/foo) -1a v1.0.0 -1c v1.0.0 -1bdep v1.0.0 -1devdep v1.0.0 -", - ) - .run(); - - // with edge-kinds - p.cargo("tree --depth 1 -e no-dev") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ a v1.0.0 -โ””โ”€โ”€ c v1.0.0 -[build-dependencies] -โ””โ”€โ”€ bdep v1.0.0 -", - ) - .run(); - - // invert - p.cargo("tree --depth 1 --invert c") - .with_stdout( - "\ -c v1.0.0 -โ”œโ”€โ”€ b v1.0.0 -โ””โ”€โ”€ foo v0.1.0 ([..]/foo) -", - ) - .run(); -} - -#[cargo_test] -fn prune() { - let p = make_simple_proj(); - - p.cargo("tree --prune c") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ a v1.0.0 - โ””โ”€โ”€ b v1.0.0 -[build-dependencies] -โ””โ”€โ”€ bdep v1.0.0 - โ””โ”€โ”€ b v1.0.0 (*) -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 - โ””โ”€โ”€ b v1.0.0 (*) -", - ) - .run(); - - // multiple prune - p.cargo("tree --prune c --prune bdep") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ a v1.0.0 - โ””โ”€โ”€ b v1.0.0 -[build-dependencies] -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 - โ””โ”€โ”€ b v1.0.0 (*) -", - ) - .run(); - - // with edge-kinds - p.cargo("tree --prune c -e normal") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ a v1.0.0 - โ””โ”€โ”€ b v1.0.0 -", - ) - .run(); - - // pruning self does not works - p.cargo("tree --prune foo") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ a v1.0.0 -โ”‚ โ””โ”€โ”€ b v1.0.0 -โ”‚ โ””โ”€โ”€ c v1.0.0 -โ””โ”€โ”€ c v1.0.0 -[build-dependencies] -โ””โ”€โ”€ bdep v1.0.0 - โ””โ”€โ”€ b v1.0.0 (*) -[dev-dependencies] -โ””โ”€โ”€ devdep v1.0.0 - โ””โ”€โ”€ b v1.0.0 (*) -", - ) - .run(); - - // dep not exist - p.cargo("tree --prune no-dep") - .with_stderr( - "\ -[ERROR] package ID specification `no-dep` did not match any packages - -Did you mean `bdep`? -", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn cyclic_features() { - // Check for stack overflow with cyclic features (oops!). - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [features] - a = ["b"] - b = ["a"] - default = ["a"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree -e features") - .with_stdout("foo v1.0.0 ([ROOT]/foo)") - .run(); - - p.cargo("tree -e features -i foo") - .with_stdout( - "\ -foo v1.0.0 ([ROOT]/foo) -โ”œโ”€โ”€ foo feature \"a\" -โ”‚ โ”œโ”€โ”€ foo feature \"b\" -โ”‚ โ”‚ โ””โ”€โ”€ foo feature \"a\" (*) -โ”‚ โ””โ”€โ”€ foo feature \"default\" (command-line) -โ”œโ”€โ”€ foo feature \"b\" (*) -โ””โ”€โ”€ foo feature \"default\" (command-line) -", - ) - .run(); -} - -#[cargo_test] -fn dev_dep_cycle_with_feature() { - // Cycle with features and a dev-dependency. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [dev-dependencies] - bar = { path = "bar" } - - [features] - a = ["bar/feat1"] - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "1.0.0" - - [dependencies] - foo = { path = ".." 
} - - [features] - feat1 = ["foo/a"] - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("tree -e features --features a") - .with_stdout( - "\ -foo v1.0.0 ([ROOT]/foo) -[dev-dependencies] -โ””โ”€โ”€ bar feature \"default\" - โ””โ”€โ”€ bar v1.0.0 ([ROOT]/foo/bar) - โ””โ”€โ”€ foo feature \"default\" (command-line) - โ””โ”€โ”€ foo v1.0.0 ([ROOT]/foo) (*) -", - ) - .run(); - - p.cargo("tree -e features --features a -i foo") - .with_stdout( - "\ -foo v1.0.0 ([ROOT]/foo) -โ”œโ”€โ”€ foo feature \"a\" (command-line) -โ”‚ โ””โ”€โ”€ bar feature \"feat1\" -โ”‚ โ””โ”€โ”€ foo feature \"a\" (command-line) (*) -โ””โ”€โ”€ foo feature \"default\" (command-line) - โ””โ”€โ”€ bar v1.0.0 ([ROOT]/foo/bar) - โ”œโ”€โ”€ bar feature \"default\" - โ”‚ [dev-dependencies] - โ”‚ โ””โ”€โ”€ foo v1.0.0 ([ROOT]/foo) (*) - โ””โ”€โ”€ bar feature \"feat1\" (*) -", - ) - .run(); -} - -#[cargo_test] -fn dev_dep_cycle_with_feature_nested() { - // Checks for an issue where a cyclic dev dependency tries to activate a - // feature on its parent that tries to activate the feature back on the - // dev-dependency. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - - [dev-dependencies] - bar = { path = "bar" } - - [features] - a = ["bar/feat1"] - b = ["a"] - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "1.0.0" - - [dependencies] - foo = { path = ".." } - - [features] - feat1 = ["foo/b"] - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("tree -e features") - .with_stdout( - "\ -foo v1.0.0 ([ROOT]/foo) -[dev-dependencies] -โ””โ”€โ”€ bar feature \"default\" - โ””โ”€โ”€ bar v1.0.0 ([ROOT]/foo/bar) - โ””โ”€โ”€ foo feature \"default\" (command-line) - โ””โ”€โ”€ foo v1.0.0 ([ROOT]/foo) (*) -", - ) - .run(); - - p.cargo("tree -e features --features a -i foo") - .with_stdout( - "\ -foo v1.0.0 ([ROOT]/foo) -โ”œโ”€โ”€ foo feature \"a\" (command-line) -โ”‚ โ””โ”€โ”€ foo feature \"b\" -โ”‚ โ””โ”€โ”€ bar feature \"feat1\" -โ”‚ โ””โ”€โ”€ foo feature \"a\" (command-line) (*) -โ”œโ”€โ”€ foo feature \"b\" (*) -โ””โ”€โ”€ foo feature \"default\" (command-line) - โ””โ”€โ”€ bar v1.0.0 ([ROOT]/foo/bar) - โ”œโ”€โ”€ bar feature \"default\" - โ”‚ [dev-dependencies] - โ”‚ โ””โ”€โ”€ foo v1.0.0 ([ROOT]/foo) (*) - โ””โ”€โ”€ bar feature \"feat1\" (*) -", - ) - .run(); - - p.cargo("tree -e features --features b -i foo") - .with_stdout( - "\ -foo v1.0.0 ([ROOT]/foo) -โ”œโ”€โ”€ foo feature \"a\" -โ”‚ โ””โ”€โ”€ foo feature \"b\" (command-line) -โ”‚ โ””โ”€โ”€ bar feature \"feat1\" -โ”‚ โ””โ”€โ”€ foo feature \"a\" (*) -โ”œโ”€โ”€ foo feature \"b\" (command-line) (*) -โ””โ”€โ”€ foo feature \"default\" (command-line) - โ””โ”€โ”€ bar v1.0.0 ([ROOT]/foo/bar) - โ”œโ”€โ”€ bar feature \"default\" - โ”‚ [dev-dependencies] - โ”‚ โ””โ”€โ”€ foo v1.0.0 ([ROOT]/foo) (*) - โ””โ”€โ”€ bar feature \"feat1\" (*) -", - ) - .run(); - - p.cargo("tree -e features --features bar/feat1 -i foo") - .with_stdout( - "\ -foo v1.0.0 ([ROOT]/foo) -โ”œโ”€โ”€ foo feature \"a\" -โ”‚ โ””โ”€โ”€ foo feature \"b\" -โ”‚ โ””โ”€โ”€ bar feature \"feat1\" (command-line) -โ”‚ โ””โ”€โ”€ foo feature \"a\" (*) -โ”œโ”€โ”€ foo feature \"b\" (*) -โ””โ”€โ”€ foo feature \"default\" (command-line) - โ””โ”€โ”€ bar v1.0.0 ([ROOT]/foo/bar) - โ”œโ”€โ”€ bar feature \"default\" - โ”‚ [dev-dependencies] - โ”‚ โ””โ”€โ”€ foo v1.0.0 ([ROOT]/foo) (*) - โ””โ”€โ”€ bar feature \"feat1\" (command-line) (*) -", - ) - .run(); -} diff --git 
a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/tree_graph_features.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/tree_graph_features.rs deleted file mode 100644 index 2c90482ef..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/tree_graph_features.rs +++ /dev/null @@ -1,361 +0,0 @@ -//! Tests for the `cargo tree` command with -e features option. - -use cargo_test_support::project; -use cargo_test_support::registry::{Dependency, Package}; - -#[cargo_test] -fn dep_feature_various() { - // Checks different ways of setting features via dependencies. - Package::new("optdep", "1.0.0") - .feature("default", &["cat"]) - .feature("cat", &[]) - .publish(); - Package::new("defaultdep", "1.0.0") - .feature("default", &["f1"]) - .feature("f1", &["optdep"]) - .add_dep(Dependency::new("optdep", "1.0").optional(true)) - .publish(); - Package::new("nodefaultdep", "1.0.0") - .feature("default", &["f1"]) - .feature("f1", &[]) - .publish(); - Package::new("nameddep", "1.0.0") - .add_dep(Dependency::new("serde", "1.0").optional(true)) - .feature("default", &["serde-stuff"]) - .feature("serde-stuff", &["serde/derive"]) - .feature("vehicle", &["car"]) - .feature("car", &[]) - .publish(); - Package::new("serde_derive", "1.0.0").publish(); - Package::new("serde", "1.0.0") - .feature("derive", &["serde_derive"]) - .add_dep(Dependency::new("serde_derive", "1.0").optional(true)) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - defaultdep = "1.0" - nodefaultdep = {version="1.0", default-features = false} - nameddep = {version="1.0", features = ["vehicle", "serde"]} - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree -e features") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ nodefaultdep v1.0.0 -โ”œโ”€โ”€ defaultdep feature \"default\" -โ”‚ โ”œโ”€โ”€ defaultdep v1.0.0 -โ”‚ โ”‚ โ””โ”€โ”€ optdep feature \"default\" -โ”‚ โ”‚ โ”œโ”€โ”€ optdep v1.0.0 -โ”‚ โ”‚ โ””โ”€โ”€ optdep feature \"cat\" -โ”‚ โ”‚ โ””โ”€โ”€ optdep v1.0.0 -โ”‚ โ””โ”€โ”€ defaultdep feature \"f1\" -โ”‚ โ”œโ”€โ”€ defaultdep v1.0.0 (*) -โ”‚ โ””โ”€โ”€ defaultdep feature \"optdep\" -โ”‚ โ””โ”€โ”€ defaultdep v1.0.0 (*) -โ”œโ”€โ”€ nameddep feature \"default\" -โ”‚ โ”œโ”€โ”€ nameddep v1.0.0 -โ”‚ โ”‚ โ””โ”€โ”€ serde feature \"default\" -โ”‚ โ”‚ โ””โ”€โ”€ serde v1.0.0 -โ”‚ โ”‚ โ””โ”€โ”€ serde_derive feature \"default\" -โ”‚ โ”‚ โ””โ”€โ”€ serde_derive v1.0.0 -โ”‚ โ””โ”€โ”€ nameddep feature \"serde-stuff\" -โ”‚ โ”œโ”€โ”€ nameddep v1.0.0 (*) -โ”‚ โ”œโ”€โ”€ nameddep feature \"serde\" -โ”‚ โ”‚ โ””โ”€โ”€ nameddep v1.0.0 (*) -โ”‚ โ””โ”€โ”€ serde feature \"derive\" -โ”‚ โ”œโ”€โ”€ serde v1.0.0 (*) -โ”‚ โ””โ”€โ”€ serde feature \"serde_derive\" -โ”‚ โ””โ”€โ”€ serde v1.0.0 (*) -โ”œโ”€โ”€ nameddep feature \"serde\" (*) -โ””โ”€โ”€ nameddep feature \"vehicle\" - โ”œโ”€โ”€ nameddep v1.0.0 (*) - โ””โ”€โ”€ nameddep feature \"car\" - โ””โ”€โ”€ nameddep v1.0.0 (*) -", - ) - .run(); -} - -#[cargo_test] -fn graph_features_ws_interdependent() { - // A workspace with interdependent crates. 
- let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a", "b"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [dependencies] - b = {path="../b", features=["feat2"]} - - [features] - default = ["a1"] - a1 = [] - a2 = [] - "#, - ) - .file("a/src/lib.rs", "") - .file( - "b/Cargo.toml", - r#" - [package] - name = "b" - version = "0.1.0" - - [features] - default = ["feat1"] - feat1 = [] - feat2 = [] - "#, - ) - .file("b/src/lib.rs", "") - .build(); - - p.cargo("tree -e features") - .with_stdout( - "\ -a v0.1.0 ([..]/foo/a) -โ”œโ”€โ”€ b feature \"default\" (command-line) -โ”‚ โ”œโ”€โ”€ b v0.1.0 ([..]/foo/b) -โ”‚ โ””โ”€โ”€ b feature \"feat1\" -โ”‚ โ””โ”€โ”€ b v0.1.0 ([..]/foo/b) -โ””โ”€โ”€ b feature \"feat2\" - โ””โ”€โ”€ b v0.1.0 ([..]/foo/b) - -b v0.1.0 ([..]/foo/b) -", - ) - .run(); - - p.cargo("tree -e features -i a -i b") - .with_stdout( - "\ -a v0.1.0 ([..]/foo/a) -โ”œโ”€โ”€ a feature \"a1\" -โ”‚ โ””โ”€โ”€ a feature \"default\" (command-line) -โ””โ”€โ”€ a feature \"default\" (command-line) - -b v0.1.0 ([..]/foo/b) -โ”œโ”€โ”€ b feature \"default\" (command-line) -โ”‚ โ””โ”€โ”€ a v0.1.0 ([..]/foo/a) (*) -โ”œโ”€โ”€ b feature \"feat1\" -โ”‚ โ””โ”€โ”€ b feature \"default\" (command-line) (*) -โ””โ”€โ”€ b feature \"feat2\" - โ””โ”€โ”€ a v0.1.0 ([..]/foo/a) (*) -", - ) - .run(); -} - -#[cargo_test] -fn slash_feature_name() { - // dep_name/feat_name syntax - Package::new("opt", "1.0.0").feature("feat1", &[]).publish(); - Package::new("notopt", "1.0.0") - .feature("cat", &[]) - .feature("animal", &["cat"]) - .publish(); - Package::new("opt2", "1.0.0").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - opt = {version = "1.0", optional=true} - opt2 = {version = "1.0", optional=true} - notopt = "1.0" - - [features] - f1 = ["opt/feat1", "notopt/animal"] - f2 = ["f1"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("tree -e features --features f1") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ notopt feature \"default\" -โ”‚ โ””โ”€โ”€ notopt v1.0.0 -โ””โ”€โ”€ opt feature \"default\" - โ””โ”€โ”€ opt v1.0.0 -", - ) - .run(); - - p.cargo("tree -e features --features f1 -i foo") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ foo feature \"default\" (command-line) -โ”œโ”€โ”€ foo feature \"f1\" (command-line) -โ””โ”€โ”€ foo feature \"opt\" - โ””โ”€โ”€ foo feature \"f1\" (command-line) -", - ) - .run(); - - p.cargo("tree -e features --features f1 -i notopt") - .with_stdout( - "\ -notopt v1.0.0 -โ”œโ”€โ”€ notopt feature \"animal\" -โ”‚ โ””โ”€โ”€ foo feature \"f1\" (command-line) -โ”œโ”€โ”€ notopt feature \"cat\" -โ”‚ โ””โ”€โ”€ notopt feature \"animal\" (*) -โ””โ”€โ”€ notopt feature \"default\" - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) - โ”œโ”€โ”€ foo feature \"default\" (command-line) - โ”œโ”€โ”€ foo feature \"f1\" (command-line) - โ””โ”€โ”€ foo feature \"opt\" - โ””โ”€โ”€ foo feature \"f1\" (command-line) -", - ) - .run(); - - p.cargo("tree -e features --features notopt/animal -i notopt") - .with_stdout( - "\ -notopt v1.0.0 -โ”œโ”€โ”€ notopt feature \"animal\" (command-line) -โ”œโ”€โ”€ notopt feature \"cat\" -โ”‚ โ””โ”€โ”€ notopt feature \"animal\" (command-line) -โ””โ”€โ”€ notopt feature \"default\" - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) - โ””โ”€โ”€ foo feature \"default\" (command-line) -", - ) - .run(); - - p.cargo("tree -e features --all-features") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ notopt feature \"default\" -โ”‚ โ””โ”€โ”€ 
notopt v1.0.0 -โ”œโ”€โ”€ opt feature \"default\" -โ”‚ โ””โ”€โ”€ opt v1.0.0 -โ””โ”€โ”€ opt2 feature \"default\" - โ””โ”€โ”€ opt2 v1.0.0 -", - ) - .run(); - - p.cargo("tree -e features --all-features -i opt2") - .with_stdout( - "\ -opt2 v1.0.0 -โ””โ”€โ”€ opt2 feature \"default\" - โ””โ”€โ”€ foo v0.1.0 ([..]/foo) - โ”œโ”€โ”€ foo feature \"f1\" (command-line) - โ”‚ โ””โ”€โ”€ foo feature \"f2\" (command-line) - โ”œโ”€โ”€ foo feature \"f2\" (command-line) - โ”œโ”€โ”€ foo feature \"opt\" (command-line) - โ”‚ โ””โ”€โ”€ foo feature \"f1\" (command-line) (*) - โ””โ”€โ”€ foo feature \"opt2\" (command-line) -", - ) - .run(); -} - -#[cargo_test] -fn features_enables_inactive_target() { - // Features that enable things on targets that are not enabled. - Package::new("optdep", "1.0.0") - .feature("feat1", &[]) - .publish(); - Package::new("dep1", "1.0.0") - .feature("somefeat", &[]) - .publish(); - Package::new("dep2", "1.0.0") - .add_dep( - Dependency::new("optdep", "1.0.0") - .optional(true) - .target("cfg(whatever)"), - ) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [target.'cfg(whatever)'.dependencies] - optdep = {version="1.0", optional=true} - dep1 = "1.0" - - [dependencies] - dep2 = "1.0" - - [features] - f1 = ["optdep"] - f2 = ["optdep/feat1"] - f3 = ["dep1/somefeat"] - f4 = ["dep2/optdep"] - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("tree -e features") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ dep2 feature \"default\" - โ””โ”€โ”€ dep2 v1.0.0 -", - ) - .run(); - p.cargo("tree -e features --all-features") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ””โ”€โ”€ dep2 feature \"default\" - โ””โ”€โ”€ dep2 v1.0.0 -", - ) - .run(); - p.cargo("tree -e features --all-features --target=all") - .with_stdout( - "\ -foo v0.1.0 ([..]/foo) -โ”œโ”€โ”€ dep1 feature \"default\" -โ”‚ โ””โ”€โ”€ dep1 v1.0.0 -โ”œโ”€โ”€ dep2 feature \"default\" -โ”‚ โ””โ”€โ”€ dep2 v1.0.0 -โ”‚ โ””โ”€โ”€ optdep feature \"default\" -โ”‚ โ””โ”€โ”€ optdep v1.0.0 -โ””โ”€โ”€ optdep feature \"default\" (*) -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/unit_graph.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/unit_graph.rs deleted file mode 100644 index b61cb453f..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/unit_graph.rs +++ /dev/null @@ -1,233 +0,0 @@ -//! Tests for --unit-graph option. - -use cargo_test_support::project; -use cargo_test_support::registry::Package; - -#[cargo_test] -fn gated() { - let p = project().file("src/lib.rs", "").build(); - p.cargo("build --unit-graph") - .with_status(101) - .with_stderr( - "\ -[ERROR] the `--unit-graph` flag is unstable[..] -See [..] -See [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn simple() { - Package::new("a", "1.0.0") - .dep("b", "1.0") - .feature("feata", &["b/featb"]) - .publish(); - Package::new("b", "1.0.0") - .dep("c", "1.0") - .feature("featb", &["c/featc"]) - .publish(); - Package::new("c", "1.0.0").feature("featc", &[]).publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - a = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build --features a/feata --unit-graph -Zunstable-options") - .masquerade_as_nightly_cargo() - .with_json( - r#"{ - "roots": [ - 3 - ], - "units": [ - { - "dependencies": [ - { - "extern_crate_name": "b", - "index": 1, - "noprelude": false, - "public": false - } - ], - "features": [ - "feata" - ], - "mode": "build", - "pkg_id": "a 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "platform": null, - "profile": { - "codegen_backend": null, - "codegen_units": null, - "debug_assertions": true, - "debuginfo": 2, - "incremental": false, - "lto": "false", - "name": "dev", - "opt_level": "0", - "overflow_checks": true, - "panic": "unwind", - "rpath": false, - "split_debuginfo": "{...}", - "strip": "none" - }, - "target": { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "a", - "src_path": "[..]/a-1.0.0/src/lib.rs", - "test": true - } - }, - { - "dependencies": [ - { - "extern_crate_name": "c", - "index": 2, - "noprelude": false, - "public": false - } - ], - "features": [ - "featb" - ], - "mode": "build", - "pkg_id": "b 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "platform": null, - "profile": { - "codegen_backend": null, - "codegen_units": null, - "debug_assertions": true, - "debuginfo": 2, - "incremental": false, - "lto": "false", - "name": "dev", - "opt_level": "0", - "overflow_checks": true, - "panic": "unwind", - "rpath": false, - "split_debuginfo": "{...}", - "strip": "none" - }, - "target": { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "b", - "src_path": "[..]/b-1.0.0/src/lib.rs", - "test": true - } - }, - { - "dependencies": [], - "features": [ - "featc" - ], - "mode": "build", - "pkg_id": "c 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "platform": null, - "profile": { - "codegen_backend": null, - "codegen_units": null, - "debug_assertions": true, - "debuginfo": 2, - "incremental": false, - "lto": "false", - "name": "dev", - "opt_level": "0", - "overflow_checks": true, - "panic": "unwind", - "rpath": false, - "split_debuginfo": "{...}", - "strip": "none" - }, - "target": { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "c", - "src_path": "[..]/c-1.0.0/src/lib.rs", - "test": true - } - }, - { - "dependencies": [ - { - "extern_crate_name": "a", - "index": 0, - "noprelude": false, - "public": false - } - ], - "features": [], - "mode": "build", - "pkg_id": "foo 0.1.0 (path+file://[..]/foo)", - "platform": null, - "profile": { - "codegen_backend": null, - "codegen_units": null, - "debug_assertions": true, - "debuginfo": 2, - "incremental": false, - "lto": "false", - "name": "dev", - "opt_level": "0", - "overflow_checks": true, - "panic": "unwind", - "rpath": false, - "split_debuginfo": "{...}", - "strip": "none" - }, - "target": { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "edition": "2015", - "kind": 
[ - "lib" - ], - "name": "foo", - "src_path": "[..]/foo/src/lib.rs", - "test": true - } - } - ], - "version": 1 - } - "#, - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/update.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/update.rs deleted file mode 100644 index 6a3f2fe78..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/update.rs +++ /dev/null @@ -1,760 +0,0 @@ -//! Tests for the `cargo update` command. - -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_manifest, project}; - -#[cargo_test] -fn minor_update_two_places() { - Package::new("log", "0.1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - log = "0.1" - foo = { path = "foo" } - "#, - ) - .file("src/lib.rs", "") - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - log = "0.1" - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - Package::new("log", "0.1.1").publish(); - - p.change_file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - log = "0.1.1" - "#, - ); - - p.cargo("build").run(); -} - -#[cargo_test] -fn transitive_minor_update() { - Package::new("log", "0.1.0").publish(); - Package::new("serde", "0.1.0").dep("log", "0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - serde = "0.1" - log = "0.1" - foo = { path = "foo" } - "#, - ) - .file("src/lib.rs", "") - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - serde = "0.1" - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - Package::new("log", "0.1.1").publish(); - Package::new("serde", "0.1.1").dep("log", "0.1.1").publish(); - - // Note that `serde` isn't actually updated here! The default behavior for - // `update` right now is to as conservatively as possible attempt to satisfy - // an update. In this case we previously locked the dependency graph to `log - // 0.1.0`, but nothing on the command line says we're allowed to update - // that. As a result the update of `serde` here shouldn't update to `serde - // 0.1.1` as that would also force an update to `log 0.1.1`. - // - // Also note that this is probably counterintuitive and weird. We may wish - // to change this one day. 
- p.cargo("update -p serde") - .with_stderr( - "\ -[UPDATING] `[..]` index -", - ) - .run(); -} - -#[cargo_test] -fn conservative() { - Package::new("log", "0.1.0").publish(); - Package::new("serde", "0.1.0").dep("log", "0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - serde = "0.1" - log = "0.1" - foo = { path = "foo" } - "#, - ) - .file("src/lib.rs", "") - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - serde = "0.1" - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - Package::new("log", "0.1.1").publish(); - Package::new("serde", "0.1.1").dep("log", "0.1").publish(); - - p.cargo("update -p serde") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] serde v0.1.0 -> v0.1.1 -", - ) - .run(); -} - -#[cargo_test] -fn update_via_new_dep() { - Package::new("log", "0.1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - log = "0.1" - # foo = { path = "foo" } - "#, - ) - .file("src/lib.rs", "") - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - log = "0.1.1" - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - Package::new("log", "0.1.1").publish(); - - p.uncomment_root_manifest(); - p.cargo("build").env("CARGO_LOG", "cargo=trace").run(); -} - -#[cargo_test] -fn update_via_new_member() { - Package::new("log", "0.1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [workspace] - # members = [ "foo" ] - - [dependencies] - log = "0.1" - "#, - ) - .file("src/lib.rs", "") - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - log = "0.1.1" - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - Package::new("log", "0.1.1").publish(); - - p.uncomment_root_manifest(); - p.cargo("build").run(); -} - -#[cargo_test] -fn add_dep_deep_new_requirement() { - Package::new("log", "0.1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - log = "0.1" - # bar = "0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - Package::new("log", "0.1.1").publish(); - Package::new("bar", "0.1.0").dep("log", "0.1.1").publish(); - - p.uncomment_root_manifest(); - p.cargo("build").run(); -} - -#[cargo_test] -fn everything_real_deep() { - Package::new("log", "0.1.0").publish(); - Package::new("foo", "0.1.0").dep("log", "0.1").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - foo = "0.1" - # bar = "0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - Package::new("log", "0.1.1").publish(); - Package::new("bar", "0.1.0").dep("log", "0.1.1").publish(); - - p.uncomment_root_manifest(); - p.cargo("build").run(); -} - -#[cargo_test] -fn change_package_version() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "a-foo" - version = "0.2.0-alpha" - authors = [] - - [dependencies] - bar = { path = "bar", version = "0.2.0-alpha" } - "#, - ) - .file("src/lib.rs", "") - .file("bar/Cargo.toml", 
&basic_manifest("bar", "0.2.0-alpha")) - .file("bar/src/lib.rs", "") - .file( - "Cargo.lock", - r#" - [[package]] - name = "foo" - version = "0.2.0" - dependencies = ["bar 0.2.0"] - - [[package]] - name = "bar" - version = "0.2.0" - "#, - ) - .build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn update_precise() { - Package::new("log", "0.1.0").publish(); - Package::new("serde", "0.1.0").publish(); - Package::new("serde", "0.2.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - serde = "0.2" - foo = { path = "foo" } - "#, - ) - .file("src/lib.rs", "") - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - serde = "0.1" - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - - Package::new("serde", "0.2.0").publish(); - - p.cargo("update -p serde:0.2.1 --precise 0.2.0") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] serde v0.2.1 -> v0.2.0 -", - ) - .run(); -} - -// cargo update should respect its arguments even without a lockfile. -// See issue "Running cargo update without a Cargo.lock ignores arguments" -// at . -#[cargo_test] -fn update_precise_first_run() { - Package::new("serde", "0.1.0").publish(); - Package::new("serde", "0.2.0").publish(); - Package::new("serde", "0.2.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - - [dependencies] - serde = "0.2" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("update -p serde --precise 0.2.0") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] serde v0.2.1 -> v0.2.0 -", - ) - .run(); - - // Assert `cargo metadata` shows serde 0.2.0 - p.cargo("metadata") - .with_json( - r#"{ - "packages": [ - { - "authors": [], - "categories": [], - "default_run": null, - "dependencies": [ - { - "features": [], - "kind": null, - "name": "serde", - "optional": false, - "registry": null, - "rename": null, - "req": "^0.2", - "source": "registry+https://github.com/rust-lang/crates.io-index", - "target": null, - "uses_default_features": true - } - ], - "description": null, - "documentation": null, - "edition": "2015", - "features": {}, - "homepage": null, - "id": "bar 0.0.1 (path+file://[..]/foo)", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "manifest_path": "[..]/foo/Cargo.toml", - "metadata": null, - "publish": null, - "name": "bar", - "readme": null, - "repository": null, - "rust_version": null, - "source": null, - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - "test": true, - "edition": "2015", - "kind": [ - "lib" - ], - "name": "bar", - "src_path": "[..]/foo/src/lib.rs" - } - ], - "version": "0.0.1" - }, - { - "authors": [], - "categories": [], - "default_run": null, - "dependencies": [], - "description": null, - "documentation": null, - "edition": "2015", - "features": {}, - "homepage": null, - "id": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "keywords": [], - "license": null, - "license_file": null, - "links": null, - "manifest_path": "[..]/home/.cargo/registry/src/-[..]/serde-0.2.0/Cargo.toml", - "metadata": null, - "publish": null, - "name": "serde", - "readme": null, - "repository": null, - "rust_version": null, - "source": "registry+https://github.com/rust-lang/crates.io-index", - "targets": [ - { - "crate_types": [ - "lib" - ], - "doc": true, - "doctest": true, - 
"edition": "2015", - "kind": [ - "lib" - ], - "name": "serde", - "src_path": "[..]/home/.cargo/registry/src/-[..]/serde-0.2.0/src/lib.rs", - "test": true - } - ], - "version": "0.2.0" - } - ], - "resolve": { - "nodes": [ - { - "dependencies": [ - "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" - ], - "deps": [ - { - "dep_kinds": [ - { - "kind": null, - "target": null - } - ], - "name": "serde", - "pkg": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" - } - ], - "features": [], - "id": "bar 0.0.1 (path+file://[..]/foo)" - }, - { - "dependencies": [], - "deps": [], - "features": [], - "id": "serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" - } - ], - "root": "bar 0.0.1 (path+file://[..]/foo)" - }, - "target_directory": "[..]/foo/target", - "version": 1, - "workspace_members": [ - "bar 0.0.1 (path+file://[..]/foo)" - ], - "workspace_root": "[..]/foo", - "metadata": null -}"#, - ) - .run(); - - p.cargo("update -p serde --precise 0.2.0") - .with_stderr( - "\ -[UPDATING] `[..]` index -", - ) - .run(); -} - -#[cargo_test] -fn preserve_top_comment() { - let p = project().file("src/lib.rs", "").build(); - - p.cargo("update").run(); - - let lockfile = p.read_lockfile(); - assert!(lockfile.starts_with("# This file is automatically @generated by Cargo.\n# It is not intended for manual editing.\n")); - - let mut lines = lockfile.lines().collect::>(); - lines.insert(2, "# some other comment"); - let mut lockfile = lines.join("\n"); - lockfile.push('\n'); // .lines/.join loses the last newline - println!("saving Cargo.lock contents:\n{}", lockfile); - - p.change_file("Cargo.lock", &lockfile); - - p.cargo("update").run(); - - let lockfile2 = p.read_lockfile(); - println!("loaded Cargo.lock contents:\n{}", lockfile2); - - assert_eq!(lockfile, lockfile2); -} - -#[cargo_test] -fn dry_run_update() { - Package::new("log", "0.1.0").publish(); - Package::new("serde", "0.1.0").dep("log", "0.1").publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.0.1" - authors = [] - - [dependencies] - serde = "0.1" - log = "0.1" - foo = { path = "foo" } - "#, - ) - .file("src/lib.rs", "") - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - serde = "0.1" - "#, - ) - .file("foo/src/lib.rs", "") - .build(); - - p.cargo("build").run(); - let old_lockfile = p.read_lockfile(); - - Package::new("log", "0.1.1").publish(); - Package::new("serde", "0.1.1").dep("log", "0.1").publish(); - - p.cargo("update -p serde --dry-run") - .with_stderr( - "\ -[UPDATING] `[..]` index -[UPDATING] serde v0.1.0 -> v0.1.1 -[WARNING] not updating lockfile due to dry run -", - ) - .run(); - let new_lockfile = p.read_lockfile(); - assert_eq!(old_lockfile, new_lockfile) -} - -#[cargo_test] -fn workspace_only() { - let p = project().file("src/main.rs", "fn main() {}").build(); - p.cargo("generate-lockfile").run(); - let lock1 = p.read_lockfile(); - - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - authors = [] - version = "0.0.2" - "#, - ); - p.cargo("update --workspace").run(); - let lock2 = p.read_lockfile(); - - assert_ne!(lock1, lock2); - assert!(lock1.contains("0.0.1")); - assert!(lock2.contains("0.0.2")); - assert!(!lock1.contains("0.0.2")); - assert!(!lock2.contains("0.0.1")); -} - -#[cargo_test] -fn precise_with_build_metadata() { - // +foo syntax shouldn't be necessary with --precise - Package::new("bar", "0.1.0+extra-stuff.0").publish(); - let p = 
project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("generate-lockfile").run(); - Package::new("bar", "0.1.1+extra-stuff.1").publish(); - Package::new("bar", "0.1.2+extra-stuff.2").publish(); - - p.cargo("update -p bar --precise 0.1") - .with_status(101) - .with_stderr( - "\ -error: invalid version format for precise version `0.1` - -Caused by: - unexpected end of input while parsing minor version number -", - ) - .run(); - - p.cargo("update -p bar --precise 0.1.1+does-not-match") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -error: no matching package named `bar` found -location searched: registry `crates-io` -required by package `foo v0.1.0 ([ROOT]/foo)` -", - ) - .run(); - - p.cargo("update -p bar --precise 0.1.1") - .with_stderr( - "\ -[UPDATING] [..] index -[UPDATING] bar v0.1.0+extra-stuff.0 -> v0.1.1+extra-stuff.1 -", - ) - .run(); - - Package::new("bar", "0.1.3").publish(); - p.cargo("update -p bar --precise 0.1.3+foo") - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] index -error: no matching package named `bar` found -location searched: registry `crates-io` -required by package `foo v0.1.0 ([ROOT]/foo)` -", - ) - .run(); - - p.cargo("update -p bar --precise 0.1.3") - .with_stderr( - "\ -[UPDATING] [..] index -[UPDATING] bar v0.1.1+extra-stuff.1 -> v0.1.3 -", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/vendor.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/vendor.rs deleted file mode 100644 index 75d78d7d9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/vendor.rs +++ /dev/null @@ -1,786 +0,0 @@ -//! Tests for the `cargo vendor` command. -//! -//! Note that every test here uses `--respect-source-config` so that the -//! "fake" crates.io is used. Otherwise `vendor` would download the crates.io -//! index from the network. 
- -use std::fs; - -use cargo_test_support::git; -use cargo_test_support::registry::{self, Package}; -use cargo_test_support::{basic_lib_manifest, paths, project, Project}; - -#[cargo_test] -fn vendor_simple() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - log = "0.3.5" - "#, - ) - .file("src/lib.rs", "") - .build(); - - Package::new("log", "0.3.5").publish(); - - p.cargo("vendor --respect-source-config").run(); - let lock = p.read_file("vendor/log/Cargo.toml"); - assert!(lock.contains("version = \"0.3.5\"")); - - add_vendor_config(&p); - p.cargo("build").run(); -} - -fn add_vendor_config(p: &Project) { - p.change_file( - ".cargo/config", - r#" - [source.crates-io] - replace-with = 'vendor' - - [source.vendor] - directory = 'vendor' - "#, - ); -} - -#[cargo_test] -fn package_exclude() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - Package::new("bar", "0.1.0") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - exclude = [".*", "!.include", "!.dotdir/include"] - "#, - ) - .file("src/lib.rs", "") - .file(".exclude", "") - .file(".include", "") - .file(".dotdir/exclude", "") - .file(".dotdir/include", "") - .publish(); - - p.cargo("vendor --respect-source-config").run(); - let csum = dbg!(p.read_file("vendor/bar/.cargo-checksum.json")); - assert!(csum.contains(".include")); - assert!(!csum.contains(".exclude")); - assert!(!csum.contains(".dotdir/exclude")); - // Gitignore doesn't re-include a file in an excluded parent directory, - // even if negating it explicitly. - assert!(!csum.contains(".dotdir/include")); -} - -#[cargo_test] -fn two_versions() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bitflags = "0.8.0" - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [dependencies] - bitflags = "0.7.0" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - Package::new("bitflags", "0.7.0").publish(); - Package::new("bitflags", "0.8.0").publish(); - - p.cargo("vendor --respect-source-config").run(); - - let lock = p.read_file("vendor/bitflags/Cargo.toml"); - assert!(lock.contains("version = \"0.8.0\"")); - let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml"); - assert!(lock.contains("version = \"0.7.0\"")); - - add_vendor_config(&p); - p.cargo("build").run(); -} - -#[cargo_test] -fn two_explicit_versions() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bitflags = "0.8.0" - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [dependencies] - bitflags = "0.7.0" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - Package::new("bitflags", "0.7.0").publish(); - Package::new("bitflags", "0.8.0").publish(); - - p.cargo("vendor --respect-source-config --versioned-dirs") - .run(); - - let lock = p.read_file("vendor/bitflags-0.8.0/Cargo.toml"); - assert!(lock.contains("version = \"0.8.0\"")); - let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml"); - assert!(lock.contains("version = \"0.7.0\"")); - - add_vendor_config(&p); - p.cargo("build").run(); -} - -#[cargo_test] -fn help() { - let p = project().build(); - 
p.cargo("vendor -h").run(); -} - -#[cargo_test] -fn update_versions() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bitflags = "0.7.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - Package::new("bitflags", "0.7.0").publish(); - Package::new("bitflags", "0.8.0").publish(); - - p.cargo("vendor --respect-source-config").run(); - - let lock = p.read_file("vendor/bitflags/Cargo.toml"); - assert!(lock.contains("version = \"0.7.0\"")); - - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bitflags = "0.8.0" - "#, - ); - p.cargo("vendor --respect-source-config").run(); - - let lock = p.read_file("vendor/bitflags/Cargo.toml"); - assert!(lock.contains("version = \"0.8.0\"")); -} - -#[cargo_test] -fn two_lockfiles() { - let p = project() - .no_manifest() - .file( - "foo/Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bitflags = "=0.7.0" - "#, - ) - .file("foo/src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - - [dependencies] - bitflags = "=0.8.0" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - Package::new("bitflags", "0.7.0").publish(); - Package::new("bitflags", "0.8.0").publish(); - - p.cargo("vendor --respect-source-config -s bar/Cargo.toml --manifest-path foo/Cargo.toml") - .run(); - - let lock = p.read_file("vendor/bitflags/Cargo.toml"); - assert!(lock.contains("version = \"0.8.0\"")); - let lock = p.read_file("vendor/bitflags-0.7.0/Cargo.toml"); - assert!(lock.contains("version = \"0.7.0\"")); - - add_vendor_config(&p); - p.cargo("build").cwd("foo").run(); - p.cargo("build").cwd("bar").run(); -} - -#[cargo_test] -fn delete_old_crates() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bitflags = "=0.7.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - Package::new("bitflags", "0.7.0").publish(); - Package::new("log", "0.3.5").publish(); - - p.cargo("vendor --respect-source-config").run(); - p.read_file("vendor/bitflags/Cargo.toml"); - - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - log = "=0.3.5" - "#, - ); - - p.cargo("vendor --respect-source-config").run(); - let lock = p.read_file("vendor/log/Cargo.toml"); - assert!(lock.contains("version = \"0.3.5\"")); - assert!(!p.root().join("vendor/bitflags/Cargo.toml").exists()); -} - -#[cargo_test] -fn ignore_files() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - url = "1.4.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - Package::new("url", "1.4.1") - .file("src/lib.rs", "") - .file("foo.orig", "") - .file(".gitignore", "") - .file(".gitattributes", "") - .file("foo.rej", "") - .publish(); - - p.cargo("vendor --respect-source-config").run(); - let csum = p.read_file("vendor/url/.cargo-checksum.json"); - assert!(!csum.contains("foo.orig")); - assert!(!csum.contains(".gitignore")); - assert!(!csum.contains(".gitattributes")); - assert!(!csum.contains(".cargo-ok")); - assert!(!csum.contains("foo.rej")); -} - -#[cargo_test] -fn included_files_only() { - let git = git::new("a", |p| { - p.file("Cargo.toml", &basic_lib_manifest("a")) - .file("src/lib.rs", "") - .file(".gitignore", "a") - .file("a/b.md", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = 
"0.1.0" - - [dependencies] - a = {{ git = '{}' }} - "#, - git.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("vendor --respect-source-config").run(); - let csum = p.read_file("vendor/a/.cargo-checksum.json"); - assert!(!csum.contains("a/b.md")); -} - -#[cargo_test] -fn dependent_crates_in_crates() { - let git = git::new("a", |p| { - p.file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [dependencies] - b = { path = 'b' } - "#, - ) - .file("src/lib.rs", "") - .file("b/Cargo.toml", &basic_lib_manifest("b")) - .file("b/src/lib.rs", "") - }); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - a = {{ git = '{}' }} - "#, - git.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("vendor --respect-source-config").run(); - p.read_file("vendor/a/.cargo-checksum.json"); - p.read_file("vendor/b/.cargo-checksum.json"); -} - -#[cargo_test] -fn vendoring_git_crates() { - let git = git::new("git", |p| { - p.file("Cargo.toml", &basic_lib_manifest("serde_derive")) - .file("src/lib.rs", "") - .file("src/wut.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies.serde] - version = "0.5.0" - - [dependencies.serde_derive] - version = "0.5.0" - - [patch.crates-io] - serde_derive = {{ git = '{}' }} - "#, - git.url() - ), - ) - .file("src/lib.rs", "") - .build(); - Package::new("serde", "0.5.0") - .dep("serde_derive", "0.5") - .publish(); - Package::new("serde_derive", "0.5.0").publish(); - - p.cargo("vendor --respect-source-config").run(); - p.read_file("vendor/serde_derive/src/wut.rs"); - - add_vendor_config(&p); - p.cargo("build").run(); -} - -#[cargo_test] -fn git_simple() { - let git = git::new("git", |p| { - p.file("Cargo.toml", &basic_lib_manifest("a")) - .file("src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - a = {{ git = '{}' }} - "#, - git.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("vendor --respect-source-config").run(); - let csum = p.read_file("vendor/a/.cargo-checksum.json"); - assert!(csum.contains("\"package\":null")); -} - -#[cargo_test] -fn git_duplicate() { - let git = git::new("a", |p| { - p.file( - "Cargo.toml", - r#" - [package] - name = "a" - version = "0.1.0" - - [dependencies] - b = { path = 'b' } - "#, - ) - .file("src/lib.rs", "") - .file("b/Cargo.toml", &basic_lib_manifest("b")) - .file("b/src/lib.rs", "") - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - a = {{ git = '{}' }} - b = '0.5.0' - - "#, - git.url() - ), - ) - .file("src/lib.rs", "") - .build(); - Package::new("b", "0.5.0").publish(); - - p.cargo("vendor --respect-source-config") - .with_stderr( - "\ -[UPDATING] [..] -[UPDATING] [..] -[DOWNLOADING] [..] -[DOWNLOADED] [..] -error: failed to sync - -Caused by: - found duplicate version of package `b v0.5.0` vendored from two sources: - - source 1: [..] - source 2: [..] 
-", - ) - .with_status(101) - .run(); -} - -#[cargo_test] -fn depend_on_vendor_dir_not_deleted() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - libc = "0.2.30" - "#, - ) - .file("src/lib.rs", "") - .build(); - - Package::new("libc", "0.2.30").publish(); - - p.cargo("vendor --respect-source-config").run(); - assert!(p.root().join("vendor/libc").is_dir()); - - p.change_file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - libc = "0.2.30" - - [patch.crates-io] - libc = { path = 'vendor/libc' } - "#, - ); - - p.cargo("vendor --respect-source-config").run(); - assert!(p.root().join("vendor/libc").is_dir()); -} - -#[cargo_test] -fn ignore_hidden() { - // Don't delete files starting with `.` - Package::new("bar", "0.1.0").publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "1.0.0" - [dependencies] - bar = "0.1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("vendor --respect-source-config").run(); - // Add a `.git` directory. - let repo = git::init(&p.root().join("vendor")); - git::add(&repo); - git::commit(&repo); - assert!(p.root().join("vendor/.git").exists()); - // Vendor again, shouldn't change anything. - p.cargo("vendor --respect-source-config").run(); - // .git should not be removed. - assert!(p.root().join("vendor/.git").exists()); - // And just for good measure, make sure no files changed. - let mut opts = git2::StatusOptions::new(); - assert!(repo - .statuses(Some(&mut opts)) - .unwrap() - .iter() - .all(|status| status.status() == git2::Status::CURRENT)); -} - -#[cargo_test] -fn config_instructions_works() { - // Check that the config instructions work for all dependency kinds. 
- registry::alt_init(); - Package::new("dep", "0.1.0").publish(); - Package::new("altdep", "0.1.0").alternative(true).publish(); - let git_project = git::new("gitdep", |project| { - project - .file("Cargo.toml", &basic_lib_manifest("gitdep")) - .file("src/lib.rs", "") - }); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = "0.1" - altdep = {{version="0.1", registry="alternative"}} - gitdep = {{git='{}'}} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "") - .build(); - let output = p - .cargo("vendor --respect-source-config") - .exec_with_output() - .unwrap(); - let output = String::from_utf8(output.stdout).unwrap(); - p.change_file(".cargo/config", &output); - - p.cargo("check -v") - .with_stderr_contains("[..]foo/vendor/dep/src/lib.rs[..]") - .with_stderr_contains("[..]foo/vendor/altdep/src/lib.rs[..]") - .with_stderr_contains("[..]foo/vendor/gitdep/src/lib.rs[..]") - .run(); -} - -#[cargo_test] -fn git_crlf_preservation() { - // Check that newlines don't get changed when you vendor - // (will only fail if your system is setup with core.autocrlf=true on windows) - let input = "hello \nthere\nmy newline\nfriends"; - let git_project = git::new("git", |p| { - p.file("Cargo.toml", &basic_lib_manifest("a")) - .file("src/lib.rs", input) - }); - - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - a = {{ git = '{}' }} - "#, - git_project.url() - ), - ) - .file("src/lib.rs", "") - .build(); - - fs::write( - paths::home().join(".gitconfig"), - r#" - [core] - autocrlf = true - "#, - ) - .unwrap(); - - p.cargo("vendor --respect-source-config").run(); - let output = p.read_file("vendor/a/src/lib.rs"); - assert_eq!(input, output); -} - -#[cargo_test] -#[cfg(unix)] -fn vendor_preserves_permissions() { - use std::os::unix::fs::MetadataExt; - - Package::new("bar", "1.0.0") - .file_with_mode("example.sh", 0o755, "#!/bin/sh") - .file("src/lib.rs", "") - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("vendor --respect-source-config").run(); - - let metadata = fs::metadata(p.root().join("vendor/bar/src/lib.rs")).unwrap(); - assert_eq!(metadata.mode() & 0o777, 0o644); - let metadata = fs::metadata(p.root().join("vendor/bar/example.sh")).unwrap(); - assert_eq!(metadata.mode() & 0o777, 0o755); -} - -#[cargo_test] -fn no_remote_dependency_no_vendor() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - [dependencies] - bar = { path = "bar" } - "#, - ) - .file("src/lib.rs", "") - .file( - "bar/Cargo.toml", - r#" - [package] - name = "bar" - version = "0.1.0" - "#, - ) - .file("bar/src/lib.rs", "") - .build(); - - p.cargo("vendor") - .with_stderr("There is no dependency to vendor in this project.") - .run(); - assert!(!p.root().join("vendor").exists()); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/verify_project.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/verify_project.rs deleted file mode 100644 index 3769aefa9..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/verify_project.rs +++ /dev/null @@ -1,73 +0,0 @@ -//! Tests for the `cargo verify-project` command. 
- -use cargo_test_support::{basic_bin_manifest, main_file, project}; - -fn verify_project_success_output() -> String { - r#"{"success":"true"}"#.into() -} - -#[cargo_test] -fn cargo_verify_project_path_to_cargo_toml_relative() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("verify-project --manifest-path foo/Cargo.toml") - .cwd(p.root().parent().unwrap()) - .with_stdout(verify_project_success_output()) - .run(); -} - -#[cargo_test] -fn cargo_verify_project_path_to_cargo_toml_absolute() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("verify-project --manifest-path") - .arg(p.root().join("Cargo.toml")) - .cwd(p.root().parent().unwrap()) - .with_stdout(verify_project_success_output()) - .run(); -} - -#[cargo_test] -fn cargo_verify_project_cwd() { - let p = project() - .file("Cargo.toml", &basic_bin_manifest("foo")) - .file("src/foo.rs", &main_file(r#""i am foo""#, &[])) - .build(); - - p.cargo("verify-project") - .with_stdout(verify_project_success_output()) - .run(); -} - -#[cargo_test] -fn cargo_verify_project_honours_unstable_features() { - let p = project() - .file( - "Cargo.toml", - r#" - cargo-features = ["test-dummy-unstable"] - - [package] - name = "foo" - version = "0.0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("verify-project") - .masquerade_as_nightly_cargo() - .with_stdout(verify_project_success_output()) - .run(); - - p.cargo("verify-project") - .with_status(1) - .with_json(r#"{"invalid":"failed to parse manifest at `[CWD]/Cargo.toml`"}"#) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/version.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/version.rs deleted file mode 100644 index a5e4676dd..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/version.rs +++ /dev/null @@ -1,55 +0,0 @@ -//! Tests for displaying the cargo version. - -use cargo_test_support::{cargo_process, project}; - -#[cargo_test] -fn simple() { - let p = project().build(); - - p.cargo("version") - .with_stdout(&format!("cargo {}\n", cargo::version())) - .run(); - - p.cargo("--version") - .with_stdout(&format!("cargo {}\n", cargo::version())) - .run(); -} - -#[cargo_test] -#[cfg_attr(target_os = "windows", ignore)] -fn version_works_without_rustc() { - let p = project().build(); - p.cargo("version").env("PATH", "").run(); -} - -#[cargo_test] -fn version_works_with_bad_config() { - let p = project().file(".cargo/config", "this is not toml").build(); - p.cargo("version").run(); -} - -#[cargo_test] -fn version_works_with_bad_target_dir() { - let p = project() - .file( - ".cargo/config", - r#" - [build] - target-dir = 4 - "#, - ) - .build(); - p.cargo("version").run(); -} - -#[cargo_test] -fn verbose() { - // This is mainly to check that it doesn't explode. 
- cargo_process("-vV") - .with_stdout_contains(&format!("cargo {}", cargo::version())) - .with_stdout_contains("host: [..]") - .with_stdout_contains("libgit2: [..]") - .with_stdout_contains("libcurl: [..]") - .with_stdout_contains("os: [..]") - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/warn_on_failure.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/warn_on_failure.rs deleted file mode 100644 index 19cb01813..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/warn_on_failure.rs +++ /dev/null @@ -1,111 +0,0 @@ -//! Tests for whether or not warnings are displayed for build scripts. - -use cargo_test_support::registry::Package; -use cargo_test_support::{project, Project}; - -static WARNING1: &str = "Hello! I'm a warning. :)"; -static WARNING2: &str = "And one more!"; - -fn make_lib(lib_src: &str) { - Package::new("bar", "0.0.1") - .file( - "Cargo.toml", - r#" - [package] - name = "bar" - authors = [] - version = "0.0.1" - build = "build.rs" - "#, - ) - .file( - "build.rs", - &format!( - r#" - fn main() {{ - use std::io::Write; - println!("cargo:warning={{}}", "{}"); - println!("hidden stdout"); - write!(&mut ::std::io::stderr(), "hidden stderr"); - println!("cargo:warning={{}}", "{}"); - }} - "#, - WARNING1, WARNING2 - ), - ) - .file("src/lib.rs", &format!("fn f() {{ {} }}", lib_src)) - .publish(); -} - -fn make_upstream(main_src: &str) -> Project { - project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.0.1" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("src/main.rs", &format!("fn main() {{ {} }}", main_src)) - .build() -} - -#[cargo_test] -fn no_warning_on_success() { - make_lib(""); - let upstream = make_upstream(""); - upstream - .cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] bar v0.0.1 ([..]) -[COMPILING] bar v0.0.1 -[COMPILING] foo v0.0.1 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn no_warning_on_bin_failure() { - make_lib(""); - let upstream = make_upstream("hi()"); - upstream - .cargo("build") - .with_status(101) - .with_stdout_does_not_contain("hidden stdout") - .with_stderr_does_not_contain("hidden stderr") - .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING1)) - .with_stderr_does_not_contain(&format!("[WARNING] {}", WARNING2)) - .with_stderr_contains("[UPDATING] `[..]` index") - .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])") - .with_stderr_contains("[COMPILING] bar v0.0.1") - .with_stderr_contains("[COMPILING] foo v0.0.1 ([..])") - .run(); -} - -#[cargo_test] -fn warning_on_lib_failure() { - make_lib("err()"); - let upstream = make_upstream(""); - upstream - .cargo("build") - .with_status(101) - .with_stdout_does_not_contain("hidden stdout") - .with_stderr_does_not_contain("hidden stderr") - .with_stderr_does_not_contain("[COMPILING] foo v0.0.1 ([..])") - .with_stderr_contains("[UPDATING] `[..]` index") - .with_stderr_contains("[DOWNLOADED] bar v0.0.1 ([..])") - .with_stderr_contains("[COMPILING] bar v0.0.1") - .with_stderr_contains(&format!("[WARNING] {}", WARNING1)) - .with_stderr_contains(&format!("[WARNING] {}", WARNING2)) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/weak_dep_features.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/weak_dep_features.rs deleted file mode 100644 index 42c4fcdc5..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/weak_dep_features.rs +++ /dev/null @@ -1,740 +0,0 @@ -//! Tests for weak-dep-features. - -use super::features2::switch_to_resolver_2; -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::registry::{Dependency, Package}; -use cargo_test_support::{project, publish}; -use std::fmt::Write; - -// Helper to create lib.rs files that check features. -fn require(enabled_features: &[&str], disabled_features: &[&str]) -> String { - let mut s = String::new(); - for feature in enabled_features { - writeln!(s, "#[cfg(not(feature=\"{feature}\"))] compile_error!(\"expected feature {feature} to be enabled\");", - feature=feature).unwrap(); - } - for feature in disabled_features { - writeln!(s, "#[cfg(feature=\"{feature}\")] compile_error!(\"did not expect feature {feature} to be enabled\");", - feature=feature).unwrap(); - } - s -} - -#[cargo_test] -fn gated() { - // Need -Z weak-dep-features to enable. - Package::new("bar", "1.0.0").feature("feat", &[]).publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional = true } - - [features] - f1 = ["bar?/feat"] - "#, - ) - .file("src/lib.rs", "") - .build(); - p.cargo("check") - .with_status(101) - .with_stderr( - "\ -error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` - -Caused by: - optional dependency features with `?` syntax are only allowed on the nightly \ - channel and requires the `-Z weak-dep-features` flag on the command line - Feature `f1` had feature value `bar?/feat`. -", - ) - .run(); -} - -#[cargo_test] -fn dependency_gate_ignored() { - // Dependencies with ? features in the registry are ignored in the - // registry if not on nightly. 
- Package::new("baz", "1.0.0").feature("feat", &[]).publish(); - Package::new("bar", "1.0.0") - .add_dep(Dependency::new("baz", "1.0").optional(true)) - .feature("feat", &["baz?/feat"]) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] -[ERROR] no matching package named `bar` found -location searched: registry `crates-io` -required by package `foo v0.1.0 ([..]/foo)` -", - ) - .run(); - - // Publish a version without the ? feature, it should ignore 1.0.0 - // and use this instead. - Package::new("bar", "1.0.1") - .add_dep(Dependency::new("baz", "1.0").optional(true)) - .feature("feat", &["baz"]) - .publish(); - p.cargo("check") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar [..] -[CHECKING] bar v1.0.1 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn simple() { - Package::new("bar", "1.0.0") - .feature("feat", &[]) - .file("src/lib.rs", &require(&["feat"], &[])) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional = true } - - [features] - f1 = ["bar?/feat"] - "#, - ) - .file("src/lib.rs", &require(&["f1"], &[])) - .build(); - - // It's a bit unfortunate that this has to download `bar`, but avoiding - // that is extremely difficult. - p.cargo("check -Z weak-dep-features --features f1") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v1.0.0 [..] -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - - p.cargo("check -Z weak-dep-features --features f1,bar") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[CHECKING] bar v1.0.0 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn deferred() { - // A complex chain that requires deferring enabling the feature due to - // another dependency getting enabled. - Package::new("bar", "1.0.0") - .feature("feat", &[]) - .file("src/lib.rs", &require(&["feat"], &[])) - .publish(); - Package::new("dep", "1.0.0") - .add_dep(Dependency::new("bar", "1.0").optional(true)) - .feature("feat", &["bar?/feat"]) - .publish(); - Package::new("bar_activator", "1.0.0") - .feature_dep("dep", "1.0", &["bar"]) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = { version = "1.0", features = ["feat"] } - bar_activator = "1.0" - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check -Z weak-dep-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] dep v1.0.0 [..] -[DOWNLOADED] bar_activator v1.0.0 [..] -[DOWNLOADED] bar v1.0.0 [..] -[CHECKING] bar v1.0.0 -[CHECKING] dep v1.0.0 -[CHECKING] bar_activator v1.0.0 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn not_optional_dep() { - // Attempt to use dep_name?/feat where dep_name is not optional. 
- Package::new("dep", "1.0.0").feature("feat", &[]).publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - dep = "1.0" - - [features] - feat = ["dep?/feat"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("check -Z weak-dep-features") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr("\ -error: failed to parse manifest at `[ROOT]/foo/Cargo.toml` - -Caused by: - feature `feat` includes `dep?/feat` with a `?`, but `dep` is not an optional dependency - A non-optional dependency of the same name is defined; consider removing the `?` or changing the dependency to be optional -") - .run(); -} - -#[cargo_test] -fn optional_cli_syntax() { - // --features bar?/feat - Package::new("bar", "1.0.0") - .feature("feat", &[]) - .file("src/lib.rs", &require(&["feat"], &[])) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional = true } - "#, - ) - .file("src/lib.rs", "") - .build(); - - // Does not build bar. - p.cargo("check --features bar?/feat -Z weak-dep-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v1.0.0 [..] -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - - // Builds bar. - p.cargo("check --features bar?/feat,bar -Z weak-dep-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[CHECKING] bar v1.0.0 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - - eprintln!("check V2 resolver"); - switch_to_resolver_2(&p); - p.build_dir().rm_rf(); - // Does not build bar. - p.cargo("check --features bar?/feat -Z weak-dep-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); - - // Builds bar. - p.cargo("check --features bar?/feat,bar -Z weak-dep-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[CHECKING] bar v1.0.0 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn required_features() { - // required-features doesn't allow ? - Package::new("bar", "1.0.0").feature("feat", &[]).publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional = true } - - [[bin]] - name = "foo" - required-features = ["bar?/feat"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("check -Z weak-dep-features") - .masquerade_as_nightly_cargo() - .with_status(101) - .with_stderr( - "\ -[UPDATING] [..] 
-[ERROR] invalid feature `bar?/feat` in required-features of target `foo`: \ -optional dependency with `?` is not allowed in required-features -", - ) - .run(); -} - -#[cargo_test] -fn weak_with_host_decouple() { - // -Z weak-opt-features with new resolver - // - // foo v0.1.0 - // โ””โ”€โ”€ common v1.0.0 - // โ””โ”€โ”€ bar v1.0.0 <-- does not have `feat` enabled - // [build-dependencies] - // โ””โ”€โ”€ bar_activator v1.0.0 - // โ””โ”€โ”€ common v1.0.0 - // โ””โ”€โ”€ bar v1.0.0 <-- does have `feat` enabled - Package::new("bar", "1.0.0") - .feature("feat", &[]) - .file( - "src/lib.rs", - r#" - pub fn feat() -> bool { - cfg!(feature = "feat") - } - "#, - ) - .publish(); - - Package::new("common", "1.0.0") - .add_dep(Dependency::new("bar", "1.0").optional(true)) - .feature("feat", &["bar?/feat"]) - .file( - "src/lib.rs", - r#" - #[cfg(feature = "bar")] - pub fn feat() -> bool { bar::feat() } - #[cfg(not(feature = "bar"))] - pub fn feat() -> bool { false } - "#, - ) - .publish(); - - Package::new("bar_activator", "1.0.0") - .feature_dep("common", "1.0", &["bar", "feat"]) - .file( - "src/lib.rs", - r#" - pub fn feat() -> bool { - common::feat() - } - "#, - ) - .publish(); - - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - resolver = "2" - - [dependencies] - common = { version = "1.0", features = ["feat"] } - - [build-dependencies] - bar_activator = "1.0" - "#, - ) - .file( - "src/main.rs", - r#" - fn main() { - assert!(!common::feat()); - } - "#, - ) - .file( - "build.rs", - r#" - fn main() { - assert!(bar_activator::feat()); - } - "#, - ) - .build(); - - p.cargo("run -Z weak-dep-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] [..] -[DOWNLOADED] [..] -[DOWNLOADED] [..] -[COMPILING] bar v1.0.0 -[COMPILING] common v1.0.0 -[COMPILING] bar_activator v1.0.0 -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -[RUNNING] `target/debug/foo[EXE]` -", - ) - .run(); -} - -#[cargo_test] -fn weak_namespaced() { - // Behavior with a dep: dependency. - Package::new("bar", "1.0.0") - .feature("feat", &[]) - .file("src/lib.rs", &require(&["feat"], &[])) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional = true } - - [features] - f1 = ["bar?/feat"] - f2 = ["dep:bar"] - "#, - ) - .file("src/lib.rs", &require(&["f1"], &["f2", "bar"])) - .build(); - - p.cargo("check -Z weak-dep-features -Z namespaced-features --features f1") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[DOWNLOADING] crates ... -[DOWNLOADED] bar v1.0.0 [..] -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] 
-", - ) - .run(); - - p.cargo("tree -Z weak-dep-features -Z namespaced-features -f") - .arg("{p} feats:{f}") - .masquerade_as_nightly_cargo() - .with_stdout("foo v0.1.0 ([ROOT]/foo) feats:") - .run(); - - p.cargo("tree -Z weak-dep-features -Z namespaced-features --features f1 -f") - .arg("{p} feats:{f}") - .masquerade_as_nightly_cargo() - .with_stdout("foo v0.1.0 ([ROOT]/foo) feats:f1") - .run(); - - p.cargo("tree -Z weak-dep-features -Z namespaced-features --features f1,f2 -f") - .arg("{p} feats:{f}") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -foo v0.1.0 ([ROOT]/foo) feats:f1,f2 -โ””โ”€โ”€ bar v1.0.0 feats:feat -", - ) - .run(); - - // "bar" remains not-a-feature - p.change_file("src/lib.rs", &require(&["f1", "f2"], &["bar"])); - - p.cargo("check -Z weak-dep-features -Z namespaced-features --features f1,f2") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[CHECKING] bar v1.0.0 -[CHECKING] foo v0.1.0 [..] -[FINISHED] [..] -", - ) - .run(); -} - -#[cargo_test] -fn tree() { - Package::new("bar", "1.0.0") - .feature("feat", &[]) - .file("src/lib.rs", &require(&["feat"], &[])) - .publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - bar = { version = "1.0", optional = true } - - [features] - f1 = ["bar?/feat"] - "#, - ) - .file("src/lib.rs", &require(&["f1"], &[])) - .build(); - - p.cargo("tree -Z weak-dep-features --features f1") - .masquerade_as_nightly_cargo() - .with_stdout("foo v0.1.0 ([ROOT]/foo)") - .run(); - - p.cargo("tree -Z weak-dep-features --features f1,bar") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -foo v0.1.0 ([ROOT]/foo) -โ””โ”€โ”€ bar v1.0.0 -", - ) - .run(); - - p.cargo("tree -Z weak-dep-features --features f1,bar -e features") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -foo v0.1.0 ([ROOT]/foo) -โ””โ”€โ”€ bar feature \"default\" - โ””โ”€โ”€ bar v1.0.0 -", - ) - .run(); - - p.cargo("tree -Z weak-dep-features --features f1,bar -e features -i bar") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -bar v1.0.0 -โ”œโ”€โ”€ bar feature \"default\" -โ”‚ โ””โ”€โ”€ foo v0.1.0 ([ROOT]/foo) -โ”‚ โ”œโ”€โ”€ foo feature \"bar\" (command-line) -โ”‚ โ”œโ”€โ”€ foo feature \"default\" (command-line) -โ”‚ โ””โ”€โ”€ foo feature \"f1\" (command-line) -โ””โ”€โ”€ bar feature \"feat\" - โ””โ”€โ”€ foo feature \"f1\" (command-line) -", - ) - .run(); - - p.cargo("tree -Z weak-dep-features -e features --features bar?/feat") - .masquerade_as_nightly_cargo() - .with_stdout("foo v0.1.0 ([ROOT]/foo)") - .run(); - - // This is a little strange in that it produces no output. - // Maybe `cargo tree` should print a note about why? - p.cargo("tree -Z weak-dep-features -e features -i bar --features bar?/feat") - .masquerade_as_nightly_cargo() - .with_stdout("") - .run(); - - p.cargo("tree -Z weak-dep-features -e features -i bar --features bar?/feat,bar") - .masquerade_as_nightly_cargo() - .with_stdout( - "\ -bar v1.0.0 -โ”œโ”€โ”€ bar feature \"default\" -โ”‚ โ””โ”€โ”€ foo v0.1.0 ([ROOT]/foo) -โ”‚ โ”œโ”€โ”€ foo feature \"bar\" (command-line) -โ”‚ โ””โ”€โ”€ foo feature \"default\" (command-line) -โ””โ”€โ”€ bar feature \"feat\" (command-line) -", - ) - .run(); -} - -#[cargo_test] -fn publish() { - // Publish behavior with /? syntax. 
- Package::new("bar", "1.0.0").feature("feat", &[]).publish(); - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - description = "foo" - license = "MIT" - homepage = "https://example.com/" - - [dependencies] - bar = { version = "1.0", optional = true } - - [features] - feat1 = [] - feat2 = ["bar?/feat"] - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("publish --token sekrit -Z weak-dep-features") - .masquerade_as_nightly_cargo() - .with_stderr( - "\ -[UPDATING] [..] -[PACKAGING] foo v0.1.0 [..] -[VERIFYING] foo v0.1.0 [..] -[COMPILING] foo v0.1.0 [..] -[FINISHED] [..] -[UPLOADING] foo v0.1.0 [..] -", - ) - .run(); - - publish::validate_upload_with_contents( - r#" - { - "authors": [], - "badges": {}, - "categories": [], - "deps": [ - { - "default_features": true, - "features": [], - "kind": "normal", - "name": "bar", - "optional": true, - "registry": "https://github.com/rust-lang/crates.io-index", - "target": null, - "version_req": "^1.0" - } - ], - "description": "foo", - "documentation": null, - "features": { - "feat1": [], - "feat2": ["bar?/feat"] - }, - "homepage": "https://example.com/", - "keywords": [], - "license": "MIT", - "license_file": null, - "links": null, - "name": "foo", - "readme": null, - "readme_file": null, - "repository": null, - "vers": "0.1.0" - } - "#, - "foo-0.1.0.crate", - &["Cargo.toml", "Cargo.toml.orig", "src/lib.rs"], - &[( - "Cargo.toml", - &format!( - r#"{} -[package] -name = "foo" -version = "0.1.0" -description = "foo" -homepage = "https://example.com/" -license = "MIT" -[dependencies.bar] -version = "1.0" -optional = true - -[features] -feat1 = [] -feat2 = ["bar?/feat"] -"#, - cargo::core::package::MANIFEST_PREAMBLE - ), - )], - ); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/workspaces.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/workspaces.rs deleted file mode 100644 index 777c6d13a..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/workspaces.rs +++ /dev/null @@ -1,2451 +0,0 @@ -//! Tests for workspaces. - -use cargo_test_support::registry::Package; -use cargo_test_support::{basic_lib_manifest, basic_manifest, git, project, sleep_ms}; -use std::env; -use std::fs; - -#[cargo_test] -fn simple_explicit() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = ".." - "#, - ) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - assert!(!p.bin("bar").is_file()); - - p.cargo("build").cwd("bar").run(); - assert!(p.bin("foo").is_file()); - assert!(p.bin("bar").is_file()); - - assert!(p.root().join("Cargo.lock").is_file()); - assert!(!p.root().join("bar/Cargo.lock").is_file()); -} - -#[cargo_test] -fn simple_explicit_default_members() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - default-members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = ".." 
- "#, - ) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build").run(); - assert!(p.bin("bar").is_file()); - assert!(!p.bin("foo").is_file()); -} - -#[cargo_test] -fn non_virtual_default_members_build_other_member() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = [".", "bar", "baz"] - default-members = ["baz"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("build") - .with_stderr( - "[..] Compiling baz v0.1.0 ([..])\n\ - [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", - ) - .run(); - - p.cargo("build --manifest-path bar/Cargo.toml") - .with_stderr( - "[..] Compiling bar v0.1.0 ([..])\n\ - [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", - ) - .run(); -} - -#[cargo_test] -fn inferred_root() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - assert!(!p.bin("bar").is_file()); - - p.cargo("build").cwd("bar").run(); - assert!(p.bin("foo").is_file()); - assert!(p.bin("bar").is_file()); - - assert!(p.root().join("Cargo.lock").is_file()); - assert!(!p.root().join("bar/Cargo.lock").is_file()); -} - -#[cargo_test] -fn inferred_path_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "bar" } - - [workspace] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}") - .file("bar/src/lib.rs", ""); - let p = p.build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - assert!(!p.bin("bar").is_file()); - - p.cargo("build").cwd("bar").run(); - assert!(p.bin("foo").is_file()); - assert!(p.bin("bar").is_file()); - - assert!(p.root().join("Cargo.lock").is_file()); - assert!(!p.root().join("bar/Cargo.lock").is_file()); -} - -#[cargo_test] -fn transitive_path_dep() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "bar" } - - [workspace] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - baz = { path = "../baz" } - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .file("bar/src/lib.rs", "") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/main.rs", "fn main() {}") - .file("baz/src/lib.rs", ""); - let p = p.build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - assert!(!p.bin("bar").is_file()); - assert!(!p.bin("baz").is_file()); - - p.cargo("build").cwd("bar").run(); - assert!(p.bin("foo").is_file()); - assert!(p.bin("bar").is_file()); - assert!(!p.bin("baz").is_file()); - - p.cargo("build").cwd("baz").run(); - assert!(p.bin("foo").is_file()); - assert!(p.bin("bar").is_file()); - assert!(p.bin("baz").is_file()); - - 
assert!(p.root().join("Cargo.lock").is_file()); - assert!(!p.root().join("bar/Cargo.lock").is_file()); - assert!(!p.root().join("baz/Cargo.lock").is_file()); -} - -#[cargo_test] -fn parent_pointer_works() { - let p = project() - .file( - "foo/Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "../bar" } - - [workspace] - "#, - ) - .file("foo/src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = "../foo" - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .file("bar/src/lib.rs", ""); - let p = p.build(); - - p.cargo("build").cwd("foo").run(); - p.cargo("build").cwd("bar").run(); - assert!(p.root().join("foo/Cargo.lock").is_file()); - assert!(!p.root().join("bar/Cargo.lock").is_file()); -} - -#[cargo_test] -fn same_names_in_workspace() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - workspace = ".." - "#, - ) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: two packages named `foo` in this workspace: -- [..]Cargo.toml -- [..]Cargo.toml -", - ) - .run(); -} - -#[cargo_test] -fn parent_doesnt_point_to_child() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build") - .cwd("bar") - .with_status(101) - .with_stderr( - "\ -error: current package believes it's in a workspace when it's not: -current: [..]Cargo.toml -workspace: [..]Cargo.toml - -this may be fixable [..] -[..] -", - ) - .run(); -} - -#[cargo_test] -fn invalid_parent_pointer() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - workspace = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: failed to read `[..]Cargo.toml` - -Caused by: - [..] -", - ) - .run(); -} - -#[cargo_test] -fn invalid_members() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["foo"] - "#, - ) - .file("src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to load manifest for workspace member `[..]/foo` - -Caused by: - failed to read `[..]foo/foo/Cargo.toml` - -Caused by: - [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn bare_workspace_ok() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - "#, - ) - .file("src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn two_roots() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - - [workspace] - members = [".."] - "#, - ) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: multiple workspace roots found in the same workspace: - [..] - [..] -", - ) - .run(); -} - -#[cargo_test] -fn workspace_isnt_root() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - workspace = "bar" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build") - .with_status(101) - .with_stderr("error: root of a workspace inferred but wasn't a root: [..]") - .run(); -} - -#[cargo_test] -fn dangling_member() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = "../baz" - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .file( - "baz/Cargo.toml", - r#" - [project] - name = "baz" - version = "0.1.0" - authors = [] - workspace = "../baz" - "#, - ) - .file("baz/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: package `[..]` is a member of the wrong workspace -expected: [..] -actual: [..] -", - ) - .run(); -} - -#[cargo_test] -fn cycle() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - workspace = "bar" - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = ".." - "#, - ) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "[ERROR] root of a workspace inferred but wasn't a root: [..]/foo/bar/Cargo.toml", - ) - .run(); -} - -#[cargo_test] -fn share_dependencies() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - dep1 = "0.1" - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - dep1 = "< 0.1.5" - "#, - ) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - Package::new("dep1", "0.1.3").publish(); - Package::new("dep1", "0.1.8").publish(); - - p.cargo("build") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] dep1 v0.1.3 ([..]) -[COMPILING] dep1 v0.1.3 -[COMPILING] foo v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn fetch_fetches_all() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - dep1 = "*" - "#, - ) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - Package::new("dep1", "0.1.3").publish(); - - p.cargo("fetch") - .with_stderr( - "\ -[UPDATING] `[..]` index -[DOWNLOADING] crates ... -[DOWNLOADED] dep1 v0.1.3 ([..]) -", - ) - .run(); -} - -#[cargo_test] -fn lock_works_for_everyone() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - dep2 = "0.1" - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - - [dependencies] - dep1 = "0.1" - "#, - ) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - Package::new("dep1", "0.1.0").publish(); - Package::new("dep2", "0.1.0").publish(); - - p.cargo("generate-lockfile") - .with_stderr("[UPDATING] `[..]` index") - .run(); - - Package::new("dep1", "0.1.1").publish(); - Package::new("dep2", "0.1.1").publish(); - - p.cargo("build") - .with_stderr( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] dep2 v0.1.0 ([..]) -[COMPILING] dep2 v0.1.0 -[COMPILING] foo v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - p.cargo("build") - .cwd("bar") - .with_stderr( - "\ -[DOWNLOADING] crates ... -[DOWNLOADED] dep1 v0.1.0 ([..]) -[COMPILING] dep1 v0.1.0 -[COMPILING] bar v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); -} - -#[cargo_test] -fn virtual_works() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - p.cargo("build").cwd("bar").run(); - assert!(p.root().join("Cargo.lock").is_file()); - assert!(p.bin("bar").is_file()); - assert!(!p.root().join("bar/Cargo.lock").is_file()); -} - -#[cargo_test] -fn explicit_package_argument_works_with_virtual_manifest() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - p.cargo("build --package bar").run(); - assert!(p.root().join("Cargo.lock").is_file()); - assert!(p.bin("bar").is_file()); - assert!(!p.root().join("bar/Cargo.lock").is_file()); -} - -#[cargo_test] -fn virtual_misconfigure() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - p.cargo("build") - .cwd("bar") - .with_status(101) - .with_stderr( - "\ -error: current package believes it's in a workspace when it's not: -current: [CWD]/Cargo.toml -workspace: [..]Cargo.toml - -this may be fixable by adding `bar` to the `workspace.members` array of the \ -manifest located at: [..] -[..] 
-", - ) - .run(); -} - -#[cargo_test] -fn virtual_build_all_implied() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - p.cargo("build").run(); -} - -#[cargo_test] -fn virtual_default_members() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - default-members = ["bar"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}") - .file("baz/src/main.rs", "fn main() {}"); - let p = p.build(); - p.cargo("build").run(); - assert!(p.bin("bar").is_file()); - assert!(!p.bin("baz").is_file()); -} - -#[cargo_test] -fn virtual_default_member_is_not_a_member() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar"] - default-members = ["something-else"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: package `[..]something-else` is listed in workspaceโ€™s default-members \ -but is not a member. -", - ) - .run(); -} - -#[cargo_test] -fn virtual_default_members_build_other_member() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["bar", "baz"] - default-members = ["baz"] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", "pub fn bar() {}") - .file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("baz/src/lib.rs", "pub fn baz() {}") - .build(); - - p.cargo("build --manifest-path bar/Cargo.toml") - .with_stderr( - "[..] Compiling bar v0.1.0 ([..])\n\ - [..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n", - ) - .run(); -} - -#[cargo_test] -fn virtual_build_no_members() { - let p = project().file( - "Cargo.toml", - r#" - [workspace] - "#, - ); - let p = p.build(); - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: manifest path `[..]` contains no package: The manifest is virtual, \ -and the workspace has no members. -", - ) - .run(); -} - -#[cargo_test] -fn include_virtual() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "") - .file( - "bar/Cargo.toml", - r#" - [workspace] - "#, - ); - let p = p.build(); - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: multiple workspace roots found in the same workspace: - [..] - [..] 
-", - ) - .run(); -} - -#[cargo_test] -fn members_include_path_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["p1"] - - [dependencies] - p3 = { path = "p3" } - "#, - ) - .file("src/lib.rs", "") - .file( - "p1/Cargo.toml", - r#" - [project] - name = "p1" - version = "0.1.0" - authors = [] - - [dependencies] - p2 = { path = "../p2" } - "#, - ) - .file("p1/src/lib.rs", "") - .file("p2/Cargo.toml", &basic_manifest("p2", "0.1.0")) - .file("p2/src/lib.rs", "") - .file("p3/Cargo.toml", &basic_manifest("p3", "0.1.0")) - .file("p3/src/lib.rs", ""); - let p = p.build(); - - p.cargo("build").cwd("p1").run(); - p.cargo("build").cwd("p2").run(); - p.cargo("build").cwd("p3").run(); - p.cargo("build").run(); - - assert!(p.root().join("target").is_dir()); - assert!(!p.root().join("p1/target").is_dir()); - assert!(!p.root().join("p2/target").is_dir()); - assert!(!p.root().join("p3/target").is_dir()); -} - -#[cargo_test] -fn new_warns_you_this_will_not_work() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - "#, - ) - .file("src/lib.rs", ""); - let p = p.build(); - - p.cargo("new --lib bar") - .with_stderr( - "\ -warning: compiling this new package may not work due to invalid workspace configuration - -current package believes it's in a workspace when it's not: -current: [..] -workspace: [..] - -this may be fixable by ensuring that this crate is depended on by the workspace \ -root: [..] -[..] -[CREATED] library `bar` package -", - ) - .run(); -} - -#[cargo_test] -fn new_warning_with_corrupt_ws() { - let p = project().file("Cargo.toml", "asdf").build(); - p.cargo("new bar") - .with_stderr( - "\ -[WARNING] compiling this new package may not work due to invalid workspace configuration - -failed to parse manifest at `[..]foo/Cargo.toml` - -Caused by: - could not parse input as TOML - -Caused by: - expected an equals, found eof at line 1 column 5 - Created binary (application) `bar` package -", - ) - .run(); -} - -#[cargo_test] -fn lock_doesnt_change_depending_on_crate() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ['baz'] - - [dependencies] - foo = "*" - "#, - ) - .file("src/lib.rs", "") - .file( - "baz/Cargo.toml", - r#" - [project] - name = "baz" - version = "0.1.0" - authors = [] - - [dependencies] - bar = "*" - "#, - ) - .file("baz/src/lib.rs", ""); - let p = p.build(); - - Package::new("foo", "1.0.0").publish(); - Package::new("bar", "1.0.0").publish(); - - p.cargo("build").run(); - - let lockfile = p.read_lockfile(); - - p.cargo("build").cwd("baz").run(); - - let lockfile2 = p.read_lockfile(); - - assert_eq!(lockfile, lockfile2); -} - -#[cargo_test] -fn rebuild_please() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ['lib', 'bin'] - "#, - ) - .file("lib/Cargo.toml", &basic_manifest("lib", "0.1.0")) - .file( - "lib/src/lib.rs", - r#" - pub fn foo() -> u32 { 0 } - "#, - ) - .file( - "bin/Cargo.toml", - r#" - [package] - name = "bin" - version = "0.1.0" - - [dependencies] - lib = { path = "../lib" } - "#, - ) - .file( - "bin/src/main.rs", - r#" - extern crate lib; - - fn main() { - assert_eq!(lib::foo(), 0); - } - "#, - ); - let p = p.build(); - - p.cargo("run").cwd("bin").run(); - - sleep_ms(1000); - - p.change_file("lib/src/lib.rs", "pub fn foo() -> u32 { 1 }"); - - 
p.cargo("build").cwd("lib").run(); - - p.cargo("run") - .cwd("bin") - .with_status(101) - .with_stderr_contains("[..]assertion[..]") - .run(); -} - -#[cargo_test] -fn workspace_in_git() { - let git_project = git::new("dep1", |project| { - project - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo"] - "#, - ) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", "") - }); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "lib" - version = "0.1.0" - - [dependencies.foo] - git = '{}' - "#, - git_project.url() - ), - ) - .file( - "src/lib.rs", - r#" - pub fn foo() -> u32 { 0 } - "#, - ); - let p = p.build(); - - p.cargo("build").run(); -} - -#[cargo_test] -fn lockfile_can_specify_nonexistant_members() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a"] - "#, - ) - .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) - .file("a/src/main.rs", "fn main() {}") - .file( - "Cargo.lock", - r#" - [[package]] - name = "a" - version = "0.1.0" - - [[package]] - name = "b" - version = "0.1.0" - "#, - ); - - let p = p.build(); - - p.cargo("build").cwd("a").run(); -} - -#[cargo_test] -fn you_cannot_generate_lockfile_for_empty_workspaces() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - "#, - ) - .file("bar/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("update") - .with_status(101) - .with_stderr("error: you can't generate a lockfile for an empty workspace.") - .run(); -} - -#[cargo_test] -fn workspace_with_transitive_dev_deps() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.5.0" - authors = ["mbrubeck@example.com"] - - [dependencies.bar] - path = "bar" - - [workspace] - "#, - ) - .file("src/main.rs", r#"fn main() {}"#) - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.5.0" - authors = ["mbrubeck@example.com"] - - [dev-dependencies.baz] - path = "../baz" - "#, - ) - .file( - "bar/src/lib.rs", - r#" - pub fn init() {} - - #[cfg(test)] - - #[test] - fn test() { - extern crate baz; - baz::do_stuff(); - } - "#, - ) - .file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0")) - .file("baz/src/lib.rs", r#"pub fn do_stuff() {}"#); - let p = p.build(); - - p.cargo("test -p bar").run(); -} - -#[cargo_test] -fn error_if_parent_cargo_toml_is_invalid() { - let p = project() - .file("Cargo.toml", "Totally not a TOML file") - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build") - .cwd("bar") - .with_status(101) - .with_stderr_contains("[ERROR] failed to parse manifest at `[..]`") - .run(); -} - -#[cargo_test] -fn relative_path_for_member_works() { - let p = project() - .file( - "foo/Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["../bar"] - "#, - ) - .file("foo/src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = "../foo" - "#, - ) - .file("bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build").cwd("foo").run(); - p.cargo("build").cwd("bar").run(); -} - -#[cargo_test] -fn relative_path_for_root_works() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - - [dependencies] - subproj = { path = "./subproj" } - 
"#, - ) - .file("src/main.rs", "fn main() {}") - .file("subproj/Cargo.toml", &basic_manifest("subproj", "0.1.0")) - .file("subproj/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build --manifest-path ./Cargo.toml").run(); - - p.cargo("build --manifest-path ../Cargo.toml") - .cwd("subproj") - .run(); -} - -#[cargo_test] -fn path_dep_outside_workspace_is_not_member() { - let p = project() - .no_manifest() - .file( - "ws/Cargo.toml", - r#" - [project] - name = "ws" - version = "0.1.0" - authors = [] - - [dependencies] - foo = { path = "../foo" } - - [workspace] - "#, - ) - .file("ws/src/lib.rs", "extern crate foo;") - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", ""); - let p = p.build(); - - p.cargo("build").cwd("ws").run(); -} - -#[cargo_test] -fn test_in_and_out_of_workspace() { - let p = project() - .no_manifest() - .file( - "ws/Cargo.toml", - r#" - [project] - name = "ws" - version = "0.1.0" - authors = [] - - [dependencies] - foo = { path = "../foo" } - - [workspace] - members = [ "../bar" ] - "#, - ) - .file("ws/src/lib.rs", "extern crate foo; pub fn f() { foo::f() }") - .file( - "foo/Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "../bar" } - "#, - ) - .file( - "foo/src/lib.rs", - "extern crate bar; pub fn f() { bar::f() }", - ) - .file( - "bar/Cargo.toml", - r#" - [project] - workspace = "../ws" - name = "bar" - version = "0.1.0" - authors = [] - "#, - ) - .file("bar/src/lib.rs", "pub fn f() { }"); - let p = p.build(); - - p.cargo("build").cwd("ws").run(); - - assert!(p.root().join("ws/Cargo.lock").is_file()); - assert!(p.root().join("ws/target").is_dir()); - assert!(!p.root().join("foo/Cargo.lock").is_file()); - assert!(!p.root().join("foo/target").is_dir()); - assert!(!p.root().join("bar/Cargo.lock").is_file()); - assert!(!p.root().join("bar/target").is_dir()); - - p.cargo("build").cwd("foo").run(); - assert!(p.root().join("foo/Cargo.lock").is_file()); - assert!(p.root().join("foo/target").is_dir()); - assert!(!p.root().join("bar/Cargo.lock").is_file()); - assert!(!p.root().join("bar/target").is_dir()); -} - -#[cargo_test] -fn test_path_dependency_under_member() { - let p = project() - .file( - "ws/Cargo.toml", - r#" - [project] - name = "ws" - version = "0.1.0" - authors = [] - - [dependencies] - foo = { path = "../foo" } - - [workspace] - "#, - ) - .file("ws/src/lib.rs", "extern crate foo; pub fn f() { foo::f() }") - .file( - "foo/Cargo.toml", - r#" - [project] - workspace = "../ws" - name = "foo" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "./bar" } - "#, - ) - .file( - "foo/src/lib.rs", - "extern crate bar; pub fn f() { bar::f() }", - ) - .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("foo/bar/src/lib.rs", "pub fn f() { }"); - let p = p.build(); - - p.cargo("build").cwd("ws").run(); - - assert!(!p.root().join("foo/bar/Cargo.lock").is_file()); - assert!(!p.root().join("foo/bar/target").is_dir()); - - p.cargo("build").cwd("foo/bar").run(); - - assert!(!p.root().join("foo/bar/Cargo.lock").is_file()); - assert!(!p.root().join("foo/bar/target").is_dir()); -} - -#[cargo_test] -fn excluded_simple() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "ws" - version = "0.1.0" - authors = [] - - [workspace] - exclude = ["foo"] - "#, - ) - .file("src/lib.rs", "") - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", ""); - let p = p.build(); - - 
p.cargo("build").run(); - assert!(p.root().join("target").is_dir()); - p.cargo("build").cwd("foo").run(); - assert!(p.root().join("foo/target").is_dir()); -} - -#[cargo_test] -fn exclude_members_preferred() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "ws" - version = "0.1.0" - authors = [] - - [workspace] - members = ["foo/bar"] - exclude = ["foo"] - "#, - ) - .file("src/lib.rs", "") - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", "") - .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("foo/bar/src/lib.rs", ""); - let p = p.build(); - - p.cargo("build").run(); - assert!(p.root().join("target").is_dir()); - p.cargo("build").cwd("foo").run(); - assert!(p.root().join("foo/target").is_dir()); - p.cargo("build").cwd("foo/bar").run(); - assert!(!p.root().join("foo/bar/target").is_dir()); -} - -#[cargo_test] -fn exclude_but_also_depend() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "ws" - version = "0.1.0" - authors = [] - - [dependencies] - bar = { path = "foo/bar" } - - [workspace] - exclude = ["foo"] - "#, - ) - .file("src/lib.rs", "") - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", "") - .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("foo/bar/src/lib.rs", ""); - let p = p.build(); - - p.cargo("build").run(); - assert!(p.root().join("target").is_dir()); - p.cargo("build").cwd("foo").run(); - assert!(p.root().join("foo/target").is_dir()); - p.cargo("build").cwd("foo/bar").run(); - assert!(p.root().join("foo/bar/target").is_dir()); -} - -#[cargo_test] -fn excluded_default_members_still_must_be_members() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo"] - default-members = ["foo", "bar"] - exclude = ["bar"] - "#, - ) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", "") - .file("bar/something.txt", ""); - let p = p.build(); - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -error: package `[..]bar` is listed in workspaceโ€™s default-members \ -but is not a member. 
-", - ) - .run(); -} - -#[cargo_test] -fn excluded_default_members_crate_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "bar/*"] - default-members = ["bar/*"] - exclude = ["bar/quux"] - "#, - ) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/main.rs", "fn main() {}") - .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("bar/baz/src/main.rs", "fn main() {}") - .file("bar/quux/Cargo.toml", &basic_manifest("quux", "0.1.0")) - .file("bar/quux/src/main.rs", "fn main() {}"); - - let p = p.build(); - p.cargo("build").run(); - - assert!(p.root().join("target").is_dir()); - assert!(!p.bin("foo").is_file()); - assert!(p.bin("baz").is_file()); - assert!(!p.bin("quux").exists()); - - p.cargo("build --workspace").run(); - assert!(p.root().join("target").is_dir()); - assert!(p.bin("foo").is_file()); - assert!(!p.bin("quux").exists()); - - p.cargo("build").cwd("bar/quux").run(); - assert!(p.root().join("bar/quux/target").is_dir()); -} - -#[cargo_test] -fn excluded_default_members_not_crate_glob() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "bar/*"] - default-members = ["bar/*"] - exclude = ["bar/docs"] - "#, - ) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/main.rs", "fn main() {}") - .file("bar/baz/Cargo.toml", &basic_manifest("baz", "0.1.0")) - .file("bar/baz/src/main.rs", "fn main() {}") - .file("bar/docs/readme.txt", "This folder is not a crate!"); - - let p = p.build(); - p.cargo("build").run(); - - assert!(!p.bin("foo").is_file()); - assert!(p.bin("baz").is_file()); - p.cargo("build --workspace").run(); - assert!(p.bin("foo").is_file()); -} - -#[cargo_test] -fn glob_syntax() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["crates/*"] - exclude = ["crates/qux"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "crates/bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = "../.." - "#, - ) - .file("crates/bar/src/main.rs", "fn main() {}") - .file( - "crates/baz/Cargo.toml", - r#" - [project] - name = "baz" - version = "0.1.0" - authors = [] - workspace = "../.." - "#, - ) - .file("crates/baz/src/main.rs", "fn main() {}") - .file( - "crates/qux/Cargo.toml", - r#" - [project] - name = "qux" - version = "0.1.0" - authors = [] - "#, - ) - .file("crates/qux/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - assert!(!p.bin("bar").is_file()); - assert!(!p.bin("baz").is_file()); - - p.cargo("build").cwd("crates/bar").run(); - assert!(p.bin("foo").is_file()); - assert!(p.bin("bar").is_file()); - - p.cargo("build").cwd("crates/baz").run(); - assert!(p.bin("foo").is_file()); - assert!(p.bin("baz").is_file()); - - p.cargo("build").cwd("crates/qux").run(); - assert!(!p.bin("qux").is_file()); - - assert!(p.root().join("Cargo.lock").is_file()); - assert!(!p.root().join("crates/bar/Cargo.lock").is_file()); - assert!(!p.root().join("crates/baz/Cargo.lock").is_file()); - assert!(p.root().join("crates/qux/Cargo.lock").is_file()); -} - -/*FIXME: This fails because of how workspace.exclude and workspace.members are working. 
-#[cargo_test] -fn glob_syntax_2() { - let p = project() - .file("Cargo.toml", r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["crates/b*"] - exclude = ["crates/q*"] - "#) - .file("src/main.rs", "fn main() {}") - .file("crates/bar/Cargo.toml", r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = "../.." - "#) - .file("crates/bar/src/main.rs", "fn main() {}") - .file("crates/baz/Cargo.toml", r#" - [project] - name = "baz" - version = "0.1.0" - authors = [] - workspace = "../.." - "#) - .file("crates/baz/src/main.rs", "fn main() {}") - .file("crates/qux/Cargo.toml", r#" - [project] - name = "qux" - version = "0.1.0" - authors = [] - "#) - .file("crates/qux/src/main.rs", "fn main() {}"); - p.build(); - - p.cargo("build").run(); - assert!(p.bin("foo").is_file()); - assert!(!p.bin("bar").is_file()); - assert!(!p.bin("baz").is_file()); - - p.cargo("build").cwd("crates/bar").run(); - assert!(p.bin("foo").is_file()); - assert!(p.bin("bar").is_file()); - - p.cargo("build").cwd("crates/baz").run(); - assert!(p.bin("foo").is_file()); - assert!(p.bin("baz").is_file()); - - p.cargo("build").cwd("crates/qux").run(); - assert!(!p.bin("qux").is_file()); - - assert!(p.root().join("Cargo.lock").is_file()); - assert!(!p.root().join("crates/bar/Cargo.lock").is_file()); - assert!(!p.root().join("crates/baz/Cargo.lock").is_file()); - assert!(p.root().join("crates/qux/Cargo.lock").is_file()); -} -*/ - -#[cargo_test] -fn glob_syntax_invalid_members() { - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["crates/*"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file("crates/bar/src/main.rs", "fn main() {}"); - let p = p.build(); - - p.cargo("build") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to load manifest for workspace member `[..]/crates/bar` - -Caused by: - failed to read `[..]foo/crates/bar/Cargo.toml` - -Caused by: - [..] -", - ) - .run(); -} - -/// This is a freshness test for feature use with workspaces. -/// -/// `feat_lib` is used by `caller1` and `caller2`, but with different features enabled. -/// This test ensures that alternating building `caller1`, `caller2` doesn't force -/// recompile of `feat_lib`. -/// -/// Ideally, once we solve rust-lang/cargo#3620, then a single Cargo build at the top level -/// will be enough. -#[cargo_test] -fn dep_used_with_separate_features() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["feat_lib", "caller1", "caller2"] - "#, - ) - .file( - "feat_lib/Cargo.toml", - r#" - [project] - name = "feat_lib" - version = "0.1.0" - authors = [] - - [features] - myfeature = [] - "#, - ) - .file("feat_lib/src/lib.rs", "") - .file( - "caller1/Cargo.toml", - r#" - [project] - name = "caller1" - version = "0.1.0" - authors = [] - - [dependencies] - feat_lib = { path = "../feat_lib" } - "#, - ) - .file("caller1/src/main.rs", "fn main() {}") - .file("caller1/src/lib.rs", "") - .file( - "caller2/Cargo.toml", - r#" - [project] - name = "caller2" - version = "0.1.0" - authors = [] - - [dependencies] - feat_lib = { path = "../feat_lib", features = ["myfeature"] } - caller1 = { path = "../caller1" } - "#, - ) - .file("caller2/src/main.rs", "fn main() {}") - .file("caller2/src/lib.rs", ""); - let p = p.build(); - - // Build the entire workspace. 
- p.cargo("build --workspace") - .with_stderr( - "\ -[..]Compiling feat_lib v0.1.0 ([..]) -[..]Compiling caller1 v0.1.0 ([..]) -[..]Compiling caller2 v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - assert!(p.bin("caller1").is_file()); - assert!(p.bin("caller2").is_file()); - - // Build `caller1`. Should build the dep library. Because the features - // are different than the full workspace, it rebuilds. - // Ideally once we solve rust-lang/cargo#3620, then a single Cargo build at the top level - // will be enough. - p.cargo("build") - .cwd("caller1") - .with_stderr( - "\ -[..]Compiling feat_lib v0.1.0 ([..]) -[..]Compiling caller1 v0.1.0 ([..]) -[FINISHED] dev [unoptimized + debuginfo] target(s) in [..] -", - ) - .run(); - - // Alternate building `caller2`/`caller1` a few times, just to make sure - // features are being built separately. Should not rebuild anything. - p.cargo("build") - .cwd("caller2") - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); - p.cargo("build") - .cwd("caller1") - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); - p.cargo("build") - .cwd("caller2") - .with_stderr("[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]") - .run(); -} - -#[cargo_test] -fn dont_recurse_out_of_cargo_home() { - let git_project = git::new("dep", |project| { - project - .file("Cargo.toml", &basic_manifest("dep", "0.1.0")) - .file("src/lib.rs", "") - .file( - "build.rs", - r#" - use std::env; - use std::path::Path; - use std::process::{self, Command}; - - fn main() { - let cargo = env::var_os("CARGO").unwrap(); - let cargo_manifest_dir = env::var_os("CARGO_MANIFEST_DIR").unwrap(); - let output = Command::new(cargo) - .args(&["metadata", "--format-version", "1", "--manifest-path"]) - .arg(&Path::new(&cargo_manifest_dir).join("Cargo.toml")) - .output() - .unwrap(); - if !output.status.success() { - eprintln!("{}", String::from_utf8(output.stderr).unwrap()); - process::exit(1); - } - } - "#, - ) - }); - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies.dep] - git = "{}" - - [workspace] - "#, - git_project.url() - ), - ) - .file("src/lib.rs", ""); - let p = p.build(); - - p.cargo("build") - .env("CARGO_HOME", p.root().join(".cargo")) - .run(); -} - -// FIXME: this fails because of how workspace.exclude and workspace.members are working. 
-/* -#[cargo_test] -fn include_and_exclude() { - let p = project() - .file("Cargo.toml", r#" - [workspace] - members = ["foo"] - exclude = ["foo/bar"] - "#) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", "") - .file("foo/bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("foo/bar/src/lib.rs", ""); - p.build(); - - p.cargo("build").cwd("foo").run(); - assert!(p.root().join("target").is_dir()); - assert!(!p.root().join("foo/target").is_dir()); - p.cargo("build").cwd("foo/bar").run(); - assert!(p.root().join("foo/bar/target").is_dir()); -} -*/ - -#[cargo_test] -fn cargo_home_at_root_works() { - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["a"] - "#, - ) - .file("src/lib.rs", "") - .file("a/Cargo.toml", &basic_manifest("a", "0.1.0")) - .file("a/src/lib.rs", ""); - let p = p.build(); - - p.cargo("build").run(); - p.cargo("build --frozen").env("CARGO_HOME", p.root()).run(); -} - -#[cargo_test] -fn relative_rustc() { - let p = project() - .file( - "src/main.rs", - r#" - use std::process::Command; - use std::env; - - fn main() { - let mut cmd = Command::new("rustc"); - for arg in env::args_os().skip(1) { - cmd.arg(arg); - } - std::process::exit(cmd.status().unwrap().code().unwrap()); - } - "#, - ) - .build(); - p.cargo("build").run(); - - let src = p - .root() - .join("target/debug/foo") - .with_extension(env::consts::EXE_EXTENSION); - - Package::new("a", "0.1.0").publish(); - - let p = project() - .at("lib") - .file( - "Cargo.toml", - r#" - [package] - name = "lib" - version = "0.1.0" - - [dependencies] - a = "0.1" - "#, - ) - .file("src/lib.rs", "") - .build(); - - fs::copy(&src, p.root().join(src.file_name().unwrap())).unwrap(); - - let file = format!("./foo{}", env::consts::EXE_SUFFIX); - p.cargo("build").env("RUSTC", &file).run(); -} - -#[cargo_test] -fn ws_rustc_err() { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a"] - "#, - ) - .file("a/Cargo.toml", &basic_lib_manifest("a")) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("rustc") - .with_status(101) - .with_stderr("[ERROR] [..]against an actual package[..]") - .run(); - - p.cargo("rustdoc") - .with_status(101) - .with_stderr("[ERROR] [..]against an actual package[..]") - .run(); -} - -#[cargo_test] -fn ws_err_unused() { - for key in &[ - "[lib]", - "[[bin]]", - "[[example]]", - "[[test]]", - "[[bench]]", - "[dependencies]", - "[dev-dependencies]", - "[build-dependencies]", - "[features]", - "[target]", - "[badges]", - ] { - let p = project() - .file( - "Cargo.toml", - &format!( - r#" - [workspace] - members = ["a"] - - {} - "#, - key - ), - ) - .file("a/Cargo.toml", &basic_lib_manifest("a")) - .file("a/src/lib.rs", "") - .build(); - p.cargo("check") - .with_status(101) - .with_stderr(&format!( - "\ -[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml` - -Caused by: - this virtual manifest specifies a {} section, which is not allowed -", - key - )) - .run(); - } -} - -#[cargo_test] -fn ws_warn_unused() { - for (key, name) in &[ - ("[profile.dev]\nopt-level = 1", "profiles"), - ("[replace]\n\"bar:0.1.0\" = { path = \"bar\" }", "replace"), - ("[patch.crates-io]\nbar = { path = \"bar\" }", "patch"), - ] { - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a"] - "#, - ) - .file( - "a/Cargo.toml", - &format!( - r#" - [package] - name = "a" - version = "0.1.0" - - {} - "#, - key - ), - ) - .file("a/src/lib.rs", "") - .build(); - p.cargo("check") - 
.with_stderr_contains(&format!( - "\ -[WARNING] {} for the non root package will be ignored, specify {} at the workspace root: -package: [..]/foo/a/Cargo.toml -workspace: [..]/foo/Cargo.toml -", - name, name - )) - .run(); - } -} - -#[cargo_test] -fn ws_warn_path() { - // Warnings include path to manifest. - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["a"] - "#, - ) - .file( - "a/Cargo.toml", - r#" - cargo-features = ["edition"] - [package] - name = "foo" - version = "0.1.0" - "#, - ) - .file("a/src/lib.rs", "") - .build(); - - p.cargo("check") - .with_stderr_contains("[WARNING] [..]/foo/a/Cargo.toml: the cargo feature `edition`[..]") - .run(); -} - -#[cargo_test] -fn invalid_missing() { - // Make sure errors are not suppressed with -q. - let p = project() - .file( - "Cargo.toml", - r#" - [package] - name = "foo" - version = "0.1.0" - - [dependencies] - x = { path = 'x' } - "#, - ) - .file("src/lib.rs", "") - .build(); - - p.cargo("build -q") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to get `x` as a dependency of package `foo v0.1.0 [..]` - -Caused by: - failed to load source for dependency `x` - -Caused by: - Unable to update [..]/foo/x - -Caused by: - failed to read `[..]foo/x/Cargo.toml` - -Caused by: - [..] -", - ) - .run(); -} - -#[cargo_test] -fn member_dep_missing() { - // Make sure errors are not suppressed with -q. - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - - [workspace] - members = ["bar"] - "#, - ) - .file("src/main.rs", "fn main() {}") - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - - [dependencies] - baz = { path = "baz" } - "#, - ) - .file("bar/src/main.rs", "fn main() {}") - .build(); - - p.cargo("build -q") - .with_status(101) - .with_stderr( - "\ -[ERROR] failed to load manifest for workspace member `[..]/bar` - -Caused by: - failed to load manifest for dependency `baz` - -Caused by: - failed to read `[..]foo/bar/baz/Cargo.toml` - -Caused by: - [..] -", - ) - .run(); -} - -#[cargo_test] -fn simple_primary_package_env_var() { - let is_primary_package = r#" - #[test] - fn verify_primary_package() {{ - assert!(option_env!("CARGO_PRIMARY_PACKAGE").is_some()); - }} - "#; - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.1.0" - authors = [] - - [workspace] - members = ["bar"] - "#, - ) - .file("src/lib.rs", is_primary_package) - .file( - "bar/Cargo.toml", - r#" - [project] - name = "bar" - version = "0.1.0" - authors = [] - workspace = ".." 
- "#, - ) - .file("bar/src/lib.rs", is_primary_package); - let p = p.build(); - - p.cargo("test").run(); - - // Again, this time selecting a specific crate - p.cargo("clean").run(); - p.cargo("test -p bar").run(); - - // Again, this time selecting all crates - p.cargo("clean").run(); - p.cargo("test --all").run(); -} - -#[cargo_test] -fn virtual_primary_package_env_var() { - let is_primary_package = r#" - #[test] - fn verify_primary_package() {{ - assert!(option_env!("CARGO_PRIMARY_PACKAGE").is_some()); - }} - "#; - - let p = project() - .file( - "Cargo.toml", - r#" - [workspace] - members = ["foo", "bar"] - "#, - ) - .file("foo/Cargo.toml", &basic_manifest("foo", "0.1.0")) - .file("foo/src/lib.rs", is_primary_package) - .file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0")) - .file("bar/src/lib.rs", is_primary_package); - let p = p.build(); - - p.cargo("test").run(); - - // Again, this time selecting a specific crate - p.cargo("clean").run(); - p.cargo("test -p foo").run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/yank.rs b/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/yank.rs deleted file mode 100644 index e70f53940..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/tests/testsuite/yank.rs +++ /dev/null @@ -1,48 +0,0 @@ -//! Tests for the `cargo yank` command. - -use std::fs; - -use cargo_test_support::paths::CargoPathExt; -use cargo_test_support::project; -use cargo_test_support::registry; - -fn setup(name: &str, version: &str) { - let dir = registry::api_path().join(format!("api/v1/crates/{}/{}", name, version)); - dir.mkdir_p(); - fs::write(dir.join("yank"), r#"{"ok": true}"#).unwrap(); -} - -#[cargo_test] -fn simple() { - registry::init(); - setup("foo", "0.0.1"); - - let p = project() - .file( - "Cargo.toml", - r#" - [project] - name = "foo" - version = "0.0.1" - authors = [] - license = "MIT" - description = "foo" - "#, - ) - .file("src/main.rs", "fn main() {}") - .build(); - - p.cargo("yank --vers 0.0.1 --token sekrit").run(); - - p.cargo("yank --undo --vers 0.0.1 --token sekrit") - .with_status(101) - .with_stderr( - " Updating `[..]` index - Unyank foo:0.0.1 -error: failed to undo a yank from the registry at file:///[..] - -Caused by: - EOF while parsing a value at line 1 column 0", - ) - .run(); -} diff --git a/collector/compile-benchmarks/cargo-0.60.0/triagebot.toml b/collector/compile-benchmarks/cargo-0.60.0/triagebot.toml deleted file mode 100644 index d34053084..000000000 --- a/collector/compile-benchmarks/cargo-0.60.0/triagebot.toml +++ /dev/null @@ -1,12 +0,0 @@ -[assign] - -[ping.windows] -message = """\ -Hey Windows Group! This bug has been identified as a good "Windows candidate". -In case it's useful, here are some [instructions] for tackling these sorts of -bugs. Maybe take a look? -Thanks! <3 - -[instructions]: https://rustc-dev-guide.rust-lang.org/notification-groups/windows.html -""" -label = "O-windows"